Dataset schema (16 string columns; "lengths" gives the min-max string length over the split, "classes" the number of distinct values):

| column | dtype | stats |
|---|---|---|
| commit | string | lengths 40-40 |
| old_file | string | lengths 4-118 |
| new_file | string | lengths 4-118 |
| old_contents | string | lengths 0-2.94k |
| new_contents | string | lengths 1-4.43k |
| subject | string | lengths 15-444 |
| message | string | lengths 16-3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5-43.2k |
| prompt | string | lengths 17-4.58k |
| response | string | lengths 1-4.43k |
| prompt_tagged | string | lengths 58-4.62k |
| response_tagged | string | lengths 1-4.43k |
| text | string | lengths 132-7.29k |
| text_tagged | string | lengths 173-7.33k |
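For orientation, rows like the ones below can be loaded with the `datasets` library. The repository id in this sketch is a placeholder, since the preview does not name the dataset:

```python
from datasets import load_dataset

# "user/commit-edits" is hypothetical; substitute the actual dataset path.
ds = load_dataset("user/commit-edits", split="train")
print(ds.column_names)    # the 16 columns listed above
print(ds[0]["commit"])    # 40-character commit hash, per the schema
```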
87f44bb68af64f2654c68fb60bf93a34ac6095a6
|
pylearn2/scripts/dbm/dbm_metrics.py
|
pylearn2/scripts/dbm/dbm_metrics.py
|
#!/usr/bin/env python
import argparse
if __name__ == '__main__':
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=["ais"])
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = args.metric
model_path = args.model_path
|
#!/usr/bin/env python
import argparse
from pylearn2.utils import serial
def compute_ais(model):
pass
if __name__ == '__main__':
# Possible metrics
metrics = {'ais': compute_ais}
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=metrics.keys())
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = metrics[args.metric]
model = serial.load(args.model_path)
metric(model)
|
Make the script recuperate the correct method
|
Make the script recuperate the correct method
|
Python
|
bsd-3-clause
|
fyffyt/pylearn2,daemonmaker/pylearn2,se4u/pylearn2,hyqneuron/pylearn2-maxsom,abergeron/pylearn2,fyffyt/pylearn2,skearnes/pylearn2,w1kke/pylearn2,matrogers/pylearn2,TNick/pylearn2,msingh172/pylearn2,shiquanwang/pylearn2,pkainz/pylearn2,fishcorn/pylearn2,chrish42/pylearn,kose-y/pylearn2,Refefer/pylearn2,lunyang/pylearn2,woozzu/pylearn2,abergeron/pylearn2,nouiz/pylearn2,pombredanne/pylearn2,pombredanne/pylearn2,skearnes/pylearn2,CIFASIS/pylearn2,lancezlin/pylearn2,fishcorn/pylearn2,fulmicoton/pylearn2,jamessergeant/pylearn2,hyqneuron/pylearn2-maxsom,ddboline/pylearn2,bartvm/pylearn2,ashhher3/pylearn2,alexjc/pylearn2,aalmah/pylearn2,ddboline/pylearn2,TNick/pylearn2,hyqneuron/pylearn2-maxsom,fulmicoton/pylearn2,msingh172/pylearn2,lancezlin/pylearn2,hantek/pylearn2,JesseLivezey/pylearn2,theoryno3/pylearn2,aalmah/pylearn2,CIFASIS/pylearn2,mclaughlin6464/pylearn2,mclaughlin6464/pylearn2,ashhher3/pylearn2,lisa-lab/pylearn2,aalmah/pylearn2,junbochen/pylearn2,woozzu/pylearn2,JesseLivezey/pylearn2,goodfeli/pylearn2,hantek/pylearn2,cosmoharrigan/pylearn2,aalmah/pylearn2,fishcorn/pylearn2,CIFASIS/pylearn2,lunyang/pylearn2,woozzu/pylearn2,ashhher3/pylearn2,junbochen/pylearn2,pkainz/pylearn2,ashhher3/pylearn2,cosmoharrigan/pylearn2,skearnes/pylearn2,shiquanwang/pylearn2,lisa-lab/pylearn2,KennethPierce/pylearnk,cosmoharrigan/pylearn2,abergeron/pylearn2,mkraemer67/pylearn2,chrish42/pylearn,kastnerkyle/pylearn2,pkainz/pylearn2,lancezlin/pylearn2,sandeepkbhat/pylearn2,se4u/pylearn2,kastnerkyle/pylearn2,JesseLivezey/plankton,TNick/pylearn2,kastnerkyle/pylearn2,caidongyun/pylearn2,junbochen/pylearn2,ddboline/pylearn2,goodfeli/pylearn2,caidongyun/pylearn2,w1kke/pylearn2,abergeron/pylearn2,nouiz/pylearn2,jamessergeant/pylearn2,ddboline/pylearn2,lisa-lab/pylearn2,fulmicoton/pylearn2,lamblin/pylearn2,fishcorn/pylearn2,jeremyfix/pylearn2,JesseLivezey/plankton,jeremyfix/pylearn2,mkraemer67/pylearn2,nouiz/pylearn2,jamessergeant/pylearn2,JesseLivezey/plankton,theoryno3/pylearn2,JesseLivezey/pylearn2,fulmicoton/pylearn2,skearnes/pylearn2,bartvm/pylearn2,shiquanwang/pylearn2,mkraemer67/pylearn2,chrish42/pylearn,mclaughlin6464/pylearn2,KennethPierce/pylearnk,lamblin/pylearn2,alexjc/pylearn2,Refefer/pylearn2,theoryno3/pylearn2,kose-y/pylearn2,shiquanwang/pylearn2,daemonmaker/pylearn2,hyqneuron/pylearn2-maxsom,Refefer/pylearn2,cosmoharrigan/pylearn2,jamessergeant/pylearn2,woozzu/pylearn2,lunyang/pylearn2,lancezlin/pylearn2,caidongyun/pylearn2,lunyang/pylearn2,caidongyun/pylearn2,hantek/pylearn2,matrogers/pylearn2,chrish42/pylearn,bartvm/pylearn2,jeremyfix/pylearn2,theoryno3/pylearn2,fyffyt/pylearn2,goodfeli/pylearn2,KennethPierce/pylearnk,junbochen/pylearn2,bartvm/pylearn2,msingh172/pylearn2,pombredanne/pylearn2,sandeepkbhat/pylearn2,hantek/pylearn2,alexjc/pylearn2,mclaughlin6464/pylearn2,JesseLivezey/pylearn2,kose-y/pylearn2,matrogers/pylearn2,lamblin/pylearn2,daemonmaker/pylearn2,msingh172/pylearn2,mkraemer67/pylearn2,se4u/pylearn2,TNick/pylearn2,KennethPierce/pylearnk,w1kke/pylearn2,kose-y/pylearn2,CIFASIS/pylearn2,JesseLivezey/plankton,pombredanne/pylearn2,kastnerkyle/pylearn2,goodfeli/pylearn2,w1kke/pylearn2,lisa-lab/pylearn2,pkainz/pylearn2,jeremyfix/pylearn2,fyffyt/pylearn2,sandeepkbhat/pylearn2,se4u/pylearn2,lamblin/pylearn2,Refefer/pylearn2,sandeepkbhat/pylearn2,alexjc/pylearn2,nouiz/pylearn2,daemonmaker/pylearn2,matrogers/pylearn2
|
#!/usr/bin/env python
import argparse
if __name__ == '__main__':
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=["ais"])
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = args.metric
model_path = args.model_path
Make the script recuperate the correct method
|
#!/usr/bin/env python
import argparse
from pylearn2.utils import serial
def compute_ais(model):
pass
if __name__ == '__main__':
# Possible metrics
metrics = {'ais': compute_ais}
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=metrics.keys())
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = metrics[args.metric]
model = serial.load(args.model_path)
metric(model)
|
<commit_before>#!/usr/bin/env python
import argparse
if __name__ == '__main__':
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=["ais"])
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = args.metric
model_path = args.model_path
<commit_msg>Make the script recuperate the correct method<commit_after>
|
#!/usr/bin/env python
import argparse
from pylearn2.utils import serial
def compute_ais(model):
pass
if __name__ == '__main__':
# Possible metrics
metrics = {'ais': compute_ais}
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=metrics.keys())
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = metrics[args.metric]
model = serial.load(args.model_path)
metric(model)
|
#!/usr/bin/env python
import argparse
if __name__ == '__main__':
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=["ais"])
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = args.metric
model_path = args.model_path
Make the script recuperate the correct method#!/usr/bin/env python
import argparse
from pylearn2.utils import serial
def compute_ais(model):
pass
if __name__ == '__main__':
# Possible metrics
metrics = {'ais': compute_ais}
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=metrics.keys())
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = metrics[args.metric]
model = serial.load(args.model_path)
metric(model)
|
<commit_before>#!/usr/bin/env python
import argparse
if __name__ == '__main__':
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=["ais"])
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = args.metric
model_path = args.model_path
<commit_msg>Make the script recuperate the correct method<commit_after>#!/usr/bin/env python
import argparse
from pylearn2.utils import serial
def compute_ais(model):
pass
if __name__ == '__main__':
# Possible metrics
metrics = {'ais': compute_ais}
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=metrics.keys())
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = metrics[args.metric]
model = serial.load(args.model_path)
metric(model)
|
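An aside on the derived columns in the row above: `prompt` appears to be `old_contents` followed by `subject`, `response` equals `new_contents`, and the tagged variants wrap the same pieces in `<commit_before>`/`<commit_msg>`/`<commit_after>` markers. A minimal sketch of rebuilding them, assuming this inferred template holds for every row:

```python
def build_views(row):
    """Rebuild the derived columns from the base columns of one row.

    The template is inferred from the preview rows; treat it as an
    assumption, not a documented guarantee of the dataset.
    """
    prompt = row["old_contents"] + "\n" + row["subject"]
    prompt_tagged = ("<commit_before>" + row["old_contents"]
                     + "<commit_msg>" + row["subject"] + "<commit_after>")
    response = row["new_contents"]
    return {
        "prompt": prompt,
        "response": response,
        "prompt_tagged": prompt_tagged,
        "response_tagged": response,
        "text": prompt + response,
        "text_tagged": prompt_tagged + response,
    }
```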
31073969ed99dd6f57ff1959c050fd0f8f59f58c
|
tests/scipy_argrelextrema.py
|
tests/scipy_argrelextrema.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
Add eg to get number of peaks
|
Add eg to get number of peaks
|
Python
|
mit
|
MonsieurV/py-findpeaks,MonsieurV/py-findpeaks
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
Add eg to get number of peaks
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
<commit_msg>Add eg to get number of peaks<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
Add eg to get number of peaks#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
<commit_msg>Add eg to get number of peaks<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from vector import vector, plot_peaks
import scipy.signal
print('Detect peaks without any filters (maxima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater
)
print('Peaks are: %s' % (indexes[0]))
# To get number of peaks:
# print("{} peaks".format(len(indexes[0])))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks without any filters (minima).')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.less
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
algorithm='scipy.signal.argrelmax'
)
print('Detect peaks with order (distance) filter.')
indexes = scipy.signal.argrelextrema(
np.array(vector),
comparator=np.greater,
order=2
)
print('Peaks are: %s' % (indexes[0]))
plot_peaks(
np.array(vector),
indexes[0],
mpd=2, algorithm='scipy.signal.argrelmax'
)
|
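The commit in the row above only adds a comment showing how to count peaks; here is a self-contained version of that pattern with toy data (the `vector` module imported in the dataset rows is local to that repo):

```python
import numpy as np
import scipy.signal

vector = np.array([0, 1, 0, 2, 0, 3, 0])  # toy signal with three local maxima
indexes = scipy.signal.argrelextrema(vector, comparator=np.greater)
print("{} peaks at {}".format(len(indexes[0]), indexes[0]))  # -> 3 peaks at [1 3 5]
```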
84062292b62d68a14981bcebf18c01feda26fb01
|
src/plotter/comparison_plotter.py
|
src/plotter/comparison_plotter.py
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
from .constants import PLOT
class ComparisonPlotter:
def __init__(self, data_list):
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
temp_data = data_list[0]
self.t = temp_data['t']
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
self.zeros = temp_data['zeros']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
if __name__ == '__main__':
steps = 100
plotter = ComparisonPlotter(
[
{'t': [i for i in range(steps)],
'x_ref': [0.5 * i for i in range(steps)],
'y_ref': [2.0 * i for i in range(steps)],
'zeros': [0.0 for _ in range(steps)],}
]
)
plotter.plot_comparison()
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
from .plotter import Plotter
from .constants import PLOT
class ComparisonPlotter(Plotter):
def __init__(self, data_list):
temp_data = data_list[0]
Plotter.__init__(self, temp_data['t'], temp_data['zeros'])
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
|
Make class ComparisonPlotter inherit from Plotter
|
feat: Make class ComparisonPlotter inherit from Plotter
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
from .constants import PLOT
class ComparisonPlotter:
def __init__(self, data_list):
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
temp_data = data_list[0]
self.t = temp_data['t']
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
self.zeros = temp_data['zeros']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
if __name__ == '__main__':
steps = 100
plotter = ComparisonPlotter(
[
{'t': [i for i in range(steps)],
'x_ref': [0.5 * i for i in range(steps)],
'y_ref': [2.0 * i for i in range(steps)],
'zeros': [0.0 for _ in range(steps)],}
]
)
plotter.plot_comparison()
feat: Make class ComparisonPlotter inherit from Plotter
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
from .plotter import Plotter
from .constants import PLOT
class ComparisonPlotter(Plotter):
def __init__(self, data_list):
temp_data = data_list[0]
Plotter.__init__(self, temp_data['t'], temp_data['zeros'])
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
|
<commit_before>#!/usr/bin/env python
import matplotlib.pyplot as plt
from .constants import PLOT
class ComparisonPlotter:
def __init__(self, data_list):
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
temp_data = data_list[0]
self.t = temp_data['t']
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
self.zeros = temp_data['zeros']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
if __name__ == '__main__':
steps = 100
plotter = ComparisonPlotter(
[
{'t': [i for i in range(steps)],
'x_ref': [0.5 * i for i in range(steps)],
'y_ref': [2.0 * i for i in range(steps)],
'zeros': [0.0 for _ in range(steps)],}
]
)
plotter.plot_comparison()
<commit_msg>feat: Make class ComparisonPlotter inherit from Plotter<commit_after>
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
from .plotter import Plotter
from .constants import PLOT
class ComparisonPlotter(Plotter):
def __init__(self, data_list):
temp_data = data_list[0]
Plotter.__init__(self, temp_data['t'], temp_data['zeros'])
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
from .constants import PLOT
class ComparisonPlotter:
def __init__(self, data_list):
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
temp_data = data_list[0]
self.t = temp_data['t']
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
self.zeros = temp_data['zeros']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
if __name__ == '__main__':
steps = 100
plotter = ComparisonPlotter(
[
{'t': [i for i in range(steps)],
'x_ref': [0.5 * i for i in range(steps)],
'y_ref': [2.0 * i for i in range(steps)],
'zeros': [0.0 for _ in range(steps)],}
]
)
plotter.plot_comparison()
feat: Make class ComparisonPlotter inherit from Plotter#!/usr/bin/env python
import matplotlib.pyplot as plt
from .plotter import Plotter
from .constants import PLOT
class ComparisonPlotter(Plotter):
def __init__(self, data_list):
temp_data = data_list[0]
Plotter.__init__(self, temp_data['t'], temp_data['zeros'])
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
|
<commit_before>#!/usr/bin/env python
import matplotlib.pyplot as plt
from .constants import PLOT
class ComparisonPlotter:
def __init__(self, data_list):
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
temp_data = data_list[0]
self.t = temp_data['t']
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
self.zeros = temp_data['zeros']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
if __name__ == '__main__':
steps = 100
plotter = ComparisonPlotter(
[
{'t': [i for i in range(steps)],
'x_ref': [0.5 * i for i in range(steps)],
'y_ref': [2.0 * i for i in range(steps)],
'zeros': [0.0 for _ in range(steps)],}
]
)
plotter.plot_comparison()
<commit_msg>feat: Make class ComparisonPlotter inherit from Plotter<commit_after>#!/usr/bin/env python
import matplotlib.pyplot as plt
from .plotter import Plotter
from .constants import PLOT
class ComparisonPlotter(Plotter):
def __init__(self, data_list):
temp_data = data_list[0]
Plotter.__init__(self, temp_data['t'], temp_data['zeros'])
self.trajectory_fig, self.trajectory_plot = plt.subplots(1, 1)
self.position_fig, self.position_plot = plt.subplots(2, 1, sharex=True)
self.position_error_fig, self.position_error_plot = plt.subplots(2, 1, sharex=True)
self.control_action_fig, self.control_action_plot = plt.subplots(2, 1, sharex=True)
self.x_ref = temp_data['x_ref']
self.y_ref = temp_data['y_ref']
def plot_comparison(self):
self.trajectory_plot.plot(self.x_ref, self.y_ref, 'r--', label=r'${\rm reference}$', lw=PLOT['line_width'])
plt.show()
|
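A side note on the row above: the patch calls the base initializer as `Plotter.__init__(self, ...)`, which works, but on Python 3 the same thing is usually written with `super()`. A stripped-down sketch (the `Plotter` base here is a stand-in, since its real definition is not shown in the row):

```python
class Plotter:
    def __init__(self, t, zeros):
        self.t, self.zeros = t, zeros

class ComparisonPlotter(Plotter):
    def __init__(self, data_list):
        temp_data = data_list[0]
        # Equivalent to Plotter.__init__(self, ...) in the commit above.
        super().__init__(temp_data['t'], temp_data['zeros'])
```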
4c0d88fe4d3fb935c5040fa39c5db10f74e6908f
|
pinax/stripe/utils.py
|
pinax/stripe/utils.py
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
Handle case when response is None
|
Handle case when response is None
|
Python
|
mit
|
pinax/django-stripe-payments
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
Handle case when response is None
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
<commit_before>import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
<commit_msg>Handle case when response is None<commit_after>
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
Handle case when response is Noneimport datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
<commit_before>import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
<commit_msg>Handle case when response is None<commit_after>import datetime
import decimal
from django.utils import timezone
def convert_tstamp(response, field_name=None):
try:
if field_name and response[field_name]:
return datetime.datetime.fromtimestamp(
response[field_name],
timezone.utc
)
if response is not None and not field_name:
return datetime.datetime.fromtimestamp(
response,
timezone.utc
)
except KeyError:
pass
return None
# currencies those amount=1 means 100 cents
# https://support.stripe.com/questions/which-zero-decimal-currencies-does-stripe-support
ZERO_DECIMAL_CURRENCIES = [
"bif", "clp", "djf", "gnf", "jpy", "kmf", "krw",
"mga", "pyg", "rwf", "vuv", "xaf", "xof", "xpf",
]
def convert_amount_for_db(amount, currency="usd"):
if currency is None: # @@@ not sure if this is right; find out what we should do when API returns null for currency
currency = "usd"
return (amount / decimal.Decimal("100")) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else decimal.Decimal(amount)
def convert_amount_for_api(amount, currency="usd"):
if currency is None:
currency = "usd"
return int(amount * 100) if currency.lower() not in ZERO_DECIMAL_CURRENCIES else int(amount)
|
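To make the behavior change in the row above concrete, here is a condensed, stdlib-only sketch of the patched `convert_tstamp` (assuming `django.utils.timezone.utc` is interchangeable with `datetime.timezone.utc`, which holds on current Django versions):

```python
import datetime

def convert_tstamp(response, field_name=None):
    # Condensed copy of the patched function from the row above.
    utc = datetime.timezone.utc
    try:
        if field_name and response[field_name]:
            return datetime.datetime.fromtimestamp(response[field_name], utc)
        if response is not None and not field_name:
            return datetime.datetime.fromtimestamp(response, utc)
    except KeyError:
        pass
    return None

print(convert_tstamp(None))                           # None; the old code raised TypeError here
print(convert_tstamp({"created": 86400}, "created"))  # 1970-01-02 00:00:00+00:00
```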
a7ba6ece76e768e642a6ed264791e3987f7c7629
|
apps/user_app/forms.py
|
apps/user_app/forms.py
|
from django import forms
from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True,) #validators=[self.isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
# def isValidUserName(self, field_data, all_data):
# try:
# User.objects.get(username=field_data)
# except User.DoesNotExist:
# return
# raise validators.ValidationError('The username "%s" is already taken.' % field_data)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
|
from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
def isValidUserName(username):
try:
User.objects.get(username=username)
except User.DoesNotExist:
return
raise ValidationError('The username "%s" is already taken.' % username)
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True, validators=[isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
|
Implement validation to the username field.
|
Implement validation to the username field.
|
Python
|
mit
|
pedrolinhares/po-po-modoro,pedrolinhares/po-po-modoro
|
from django import forms
from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True,) #validators=[self.isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
# def isValidUserName(self, field_data, all_data):
# try:
# User.objects.get(username=field_data)
# except User.DoesNotExist:
# return
# raise validators.ValidationError('The username "%s" is already taken.' % field_data)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
Implement validation to the username field.
|
from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
def isValidUserName(username):
try:
User.objects.get(username=username)
except User.DoesNotExist:
return
raise ValidationError('The username "%s" is already taken.' % username)
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True, validators=[isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
|
<commit_before>from django import forms
from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True,) #validators=[self.isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
# def isValidUserName(self, field_data, all_data):
# try:
# User.objects.get(username=field_data)
# except User.DoesNotExist:
# return
# raise validators.ValidationError('The username "%s" is already taken.' % field_data)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
<commit_msg>Implement validation to the username field.<commit_after>
|
from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
def isValidUserName(username):
try:
User.objects.get(username=username)
except User.DoesNotExist:
return
raise ValidationError('The username "%s" is already taken.' % username)
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True, validators=[isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
|
from django import forms
from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True,) #validators=[self.isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
# def isValidUserName(self, field_data, all_data):
# try:
# User.objects.get(username=field_data)
# except User.DoesNotExist:
# return
# raise validators.ValidationError('The username "%s" is already taken.' % field_data)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
Implement validation to the username field.from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
def isValidUserName(username):
try:
User.objects.get(username=username)
except User.DoesNotExist:
return
raise ValidationError('The username "%s" is already taken.' % username)
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True, validators=[isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
|
<commit_before>from django import forms
from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True,) #validators=[self.isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
# def isValidUserName(self, field_data, all_data):
# try:
# User.objects.get(username=field_data)
# except User.DoesNotExist:
# return
# raise validators.ValidationError('The username "%s" is already taken.' % field_data)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
<commit_msg>Implement validation to the username field.<commit_after>from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
def isValidUserName(username):
try:
User.objects.get(username=username)
except User.DoesNotExist:
return
raise ValidationError('The username "%s" is already taken.' % username)
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True, validators=[isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
|
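The control flow of the validator in the row above, reduced to plain Python (a sketch; the real code queries `User.objects` and raises `django.core.exceptions.ValidationError`):

```python
existing_usernames = {"alice", "bob"}  # stands in for the User table

def is_valid_username(username):
    # Mirrors isValidUserName: success is a silent fall-through,
    # failure is an exception.
    if username in existing_usernames:
        raise ValueError('The username "%s" is already taken.' % username)

is_valid_username("carol")      # passes silently
try:
    is_valid_username("alice")
except ValueError as exc:
    print(exc)                  # The username "alice" is already taken.
```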
5553481f8cc8537febbf24fbfea4315a3b61548f
|
corehq/apps/commtrack/management/commands/check_multiple_parentage.py
|
corehq/apps/commtrack/management/commands/check_multiple_parentage.py
|
from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
class Command(BaseCommand):
def handle(self, *args, **options):
self.stdout.write("Populating site codes...\n")
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
self.stdout.write(
"Found multiple parent options in domain: " +
d.name
)
|
from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
from corehq.apps.locations.models import Location
import csv
class Command(BaseCommand):
def handle(self, *args, **options):
with open('parentage_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow([
'id',
'name',
'is_test',
'location_type',
'number_of_offending_locations',
])
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
count = len(list(
Location.filter_by_type(
d.name,
loc_type.name,
)
))
csv_writer.writerow([
d._id,
d.name,
d.is_test,
loc_type.name,
count
])
|
Switch to CSV and add important info
|
Switch to CSV and add important info
|
Python
|
bsd-3-clause
|
puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
class Command(BaseCommand):
def handle(self, *args, **options):
self.stdout.write("Populating site codes...\n")
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
self.stdout.write(
"Found multiple parent options in domain: " +
d.name
)
Switch to CSV and add important info
|
from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
from corehq.apps.locations.models import Location
import csv
class Command(BaseCommand):
def handle(self, *args, **options):
with open('parentage_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow([
'id',
'name',
'is_test',
'location_type',
'number_of_offending_locations',
])
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
count = len(list(
Location.filter_by_type(
d.name,
loc_type.name,
)
))
csv_writer.writerow([
d._id,
d.name,
d.is_test,
loc_type.name,
count
])
|
<commit_before>from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
class Command(BaseCommand):
def handle(self, *args, **options):
self.stdout.write("Populating site codes...\n")
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
self.stdout.write(
"Found multiple parent options in domain: " +
d.name
)
<commit_msg>Switch to CSV and add important info<commit_after>
|
from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
from corehq.apps.locations.models import Location
import csv
class Command(BaseCommand):
def handle(self, *args, **options):
with open('parentage_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow([
'id',
'name',
'is_test',
'location_type',
'number_of_offending_locations',
])
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
count = len(list(
Location.filter_by_type(
d.name,
loc_type.name,
)
))
csv_writer.writerow([
d._id,
d.name,
d.is_test,
loc_type.name,
count
])
|
from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
class Command(BaseCommand):
def handle(self, *args, **options):
self.stdout.write("Populating site codes...\n")
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
self.stdout.write(
"Found multiple parent options in domain: " +
d.name
)
Switch to CSV and add important infofrom django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
from corehq.apps.locations.models import Location
import csv
class Command(BaseCommand):
def handle(self, *args, **options):
with open('parentage_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow([
'id',
'name',
'is_test',
'location_type',
'number_of_offending_locations',
])
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
count = len(list(
Location.filter_by_type(
d.name,
loc_type.name,
)
))
csv_writer.writerow([
d._id,
d.name,
d.is_test,
loc_type.name,
count
])
|
<commit_before>from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
class Command(BaseCommand):
def handle(self, *args, **options):
self.stdout.write("Populating site codes...\n")
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
self.stdout.write(
"Found multiple parent options in domain: " +
d.name
)
<commit_msg>Switch to CSV and add important info<commit_after>from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
from corehq.apps.locations.models import Location
import csv
class Command(BaseCommand):
def handle(self, *args, **options):
with open('parentage_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow([
'id',
'name',
'is_test',
'location_type',
'number_of_offending_locations',
])
domains = Domain.get_all()
for d in domains:
if d.commtrack_enabled:
for loc_type in d.commtrack_settings.location_types:
if len(loc_type.allowed_parents) > 1:
count = len(list(
Location.filter_by_type(
d.name,
loc_type.name,
)
))
csv_writer.writerow([
d._id,
d.name,
d.is_test,
loc_type.name,
count
])
|
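One portability note on the row above: opening the CSV file as `'wb+'` and handing it to `csv.writer` is a Python 2 idiom; on Python 3 the file must be opened in text mode with `newline=''`. The equivalent setup:

```python
import csv

# Python 3 equivalent of open('parentage_results.csv', 'wb+') for csv.writer.
with open('parentage_results.csv', 'w', newline='') as csvfile:
    csv_writer = csv.writer(csvfile, delimiter=',', quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)
    csv_writer.writerow(['id', 'name', 'is_test', 'location_type',
                         'number_of_offending_locations'])
```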
adb0bffd6586fa380a077f1ec0b950c6ae5d8b4f
|
bin/reporting-api.py
|
bin/reporting-api.py
|
#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--confdir', action='store', required=True, help="Specify config directory")
parser.add_argument('--logfile', action='store', required=False, default='/var/log/reporting-api.log',
help="Specify the file to log to")
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
|
#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
REALFILE = os.path.realpath(__file__)
REALDIR = os.path.dirname(REALFILE)
PARDIR = os.path.realpath(os.path.join(REALDIR, os.pardir))
CONFDIR = os.path.join(PARDIR, 'reporting_api', 'conf')
parser = argparse.ArgumentParser()
parser.add_argument(
'-c',
'--confdir',
action='store',
required=False,
default=CONFDIR,
help="Specify config directory"
)
parser.add_argument(
'-l',
'--logfile',
action='store',
required=False,
default='/var/log/reporting-api.log',
help="Specify the file to log to"
)
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
|
Make the confdir option optional, with default value the conf subdir in the source tree. Wrap long lines. Add a -l alias for option --logfile.
|
Make the confdir option optional, with default value the conf subdir in the source tree. Wrap long lines. Add a -l alias for option --logfile.
|
Python
|
apache-2.0
|
NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api
|
#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--confdir', action='store', required=True, help="Specify config directory")
parser.add_argument('--logfile', action='store', required=False, default='/var/log/reporting-api.log',
help="Specify the file to log to")
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
Make the confdir option optional, with default value the conf subdir in the source tree. Wrap long lines. Add a -l alias for option --logfile.
|
#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
REALFILE = os.path.realpath(__file__)
REALDIR = os.path.dirname(REALFILE)
PARDIR = os.path.realpath(os.path.join(REALDIR, os.pardir))
CONFDIR = os.path.join(PARDIR, 'reporting_api', 'conf')
parser = argparse.ArgumentParser()
parser.add_argument(
'-c',
'--confdir',
action='store',
required=False,
default=CONFDIR,
help="Specify config directory"
)
parser.add_argument(
'-l',
'--logfile',
action='store',
required=False,
default='/var/log/reporting-api.log',
help="Specify the file to log to"
)
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
|
<commit_before>#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--confdir', action='store', required=True, help="Specify config directory")
parser.add_argument('--logfile', action='store', required=False, default='/var/log/reporting-api.log',
help="Specify the file to log to")
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
<commit_msg>Make the confdir option optional, with default value the conf subdir in the source tree. Wrap long lines. Add a -l alias for option --logfile.<commit_after>
|
#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
REALFILE = os.path.realpath(__file__)
REALDIR = os.path.dirname(REALFILE)
PARDIR = os.path.realpath(os.path.join(REALDIR, os.pardir))
CONFDIR = os.path.join(PARDIR, 'reporting_api', 'conf')
parser = argparse.ArgumentParser()
parser.add_argument(
'-c',
'--confdir',
action='store',
required=False,
default=CONFDIR,
help="Specify config directory"
)
parser.add_argument(
'-l',
'--logfile',
action='store',
required=False,
default='/var/log/reporting-api.log',
help="Specify the file to log to"
)
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
|
#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--confdir', action='store', required=True, help="Specify config directory")
parser.add_argument('--logfile', action='store', required=False, default='/var/log/reporting-api.log',
help="Specify the file to log to")
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
Make the confdir option optional, with default value the conf subdir in the source tree. Wrap long lines. Add a -l alias for option --logfile.#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
REALFILE = os.path.realpath(__file__)
REALDIR = os.path.dirname(REALFILE)
PARDIR = os.path.realpath(os.path.join(REALDIR, os.pardir))
CONFDIR = os.path.join(PARDIR, 'reporting_api', 'conf')
parser = argparse.ArgumentParser()
parser.add_argument(
'-c',
'--confdir',
action='store',
required=False,
default=CONFDIR,
help="Specify config directory"
)
parser.add_argument(
'-l',
'--logfile',
action='store',
required=False,
default='/var/log/reporting-api.log',
help="Specify the file to log to"
)
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
|
<commit_before>#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--confdir', action='store', required=True, help="Specify config directory")
parser.add_argument('--logfile', action='store', required=False, default='/var/log/reporting-api.log',
help="Specify the file to log to")
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
<commit_msg>Make the confdir option optional, with default value the conf subdir in the source tree. Wrap long lines. Add a -l alias for option --logfile.<commit_after>#!/usr/bin/python
"""
Start the Reporting API application using Paste Deploy.
"""
import sys
import os
from paste.deploy import loadapp, loadserver
import logging
import argparse
def parse_args():
REALFILE = os.path.realpath(__file__)
REALDIR = os.path.dirname(REALFILE)
PARDIR = os.path.realpath(os.path.join(REALDIR, os.pardir))
CONFDIR = os.path.join(PARDIR, 'reporting_api', 'conf')
parser = argparse.ArgumentParser()
parser.add_argument(
'-c',
'--confdir',
action='store',
required=False,
default=CONFDIR,
help="Specify config directory"
)
parser.add_argument(
'-l',
'--logfile',
action='store',
required=False,
default='/var/log/reporting-api.log',
help="Specify the file to log to"
)
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
logging.basicConfig(
filename=args.logfile, level=logging.INFO
)
PASTE_CONFIG = os.path.join(args.confdir, 'paste.config')
REPORTING_APP = loadapp('config:' + PASTE_CONFIG)
SERVER = loadserver('config:' + PASTE_CONFIG)
SERVER(REPORTING_APP)
|
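The reporting-api change above replaces a required --confdir flag with a default computed from the script's own location. A minimal sketch of that pattern, with hypothetical directory names:

import os
from argparse import ArgumentParser

# Resolve symlinks first so the default tracks the real source tree.
REALDIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_CONFDIR = os.path.join(REALDIR, 'conf')

parser = ArgumentParser()
parser.add_argument('-c', '--confdir', default=DEFAULT_CONFDIR,
                    help="Config directory (defaults into the source tree)")
args = parser.parse_args([])  # empty argv so the sketch runs non-interactively
print(args.confdir)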
6b92c4d155d066fb6f4e9180acb4ad07d7fc313d
|
pskb_website/utils.py
|
pskb_website/utils.py
|
import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
|
import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.:]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
|
Change ":" in titles to "-" for better SEO
|
Change ":" in titles to "-" for better SEO
|
Python
|
agpl-3.0
|
paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms
|
import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
Change ":" in titles to "-" for better SEO
|
import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.:]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
|
<commit_before>import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
<commit_msg>Change ":" in titles to "-" for better SEO<commit_after>
|
import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.:]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
|
import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
Change ":" in titles to "-" for better SEOimport re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.:]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
|
<commit_before>import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
<commit_msg>Change ":" in titles to "-" for better SEO<commit_after>import re
from unicodedata import normalize
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.:]+')
# From http://flask.pocoo.org/snippets/5/
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
|
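The slugify change above only adds ':' to the punctuation character class, but it changes how titles split into words. A small before/after sketch (the title is a made-up example):

import re

old_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
new_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.:]+')

title = 'python: the basics'
print(old_re.split(title))  # ['python:', 'the', 'basics'] -> slug 'python:-the-basics'
print(new_re.split(title))  # ['python', 'the', 'basics']  -> slug 'python-the-basics'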
e3db38f0de04ab3e1126f3417fcdd99ab7d2e81c
|
flask_ldap_login/check.py
|
flask_ldap_login/check.py
|
"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', required=True,
help='Ldap login with this username')
parser.add_argument('-p', '--password', required=True,
help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
print("Got userdata for %s" % args.username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', help='Ldap login with this username')
parser.add_argument('-p', '--password', help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(username, password)
print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
Use getpass to get password
|
Use getpass to get password
|
Python
|
bsd-2-clause
|
ContinuumIO/flask-ldap-login,ContinuumIO/flask-ldap-login
|
"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', required=True,
help='Ldap login with this username')
parser.add_argument('-p', '--password', required=True,
help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
print("Got userdata for %s" % args.username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
Use getpass to get password
|
"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', help='Ldap login with this username')
parser.add_argument('-p', '--password', help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(username, password)
print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
<commit_before>"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', required=True,
help='Ldap login with this username')
parser.add_argument('-p', '--password', required=True,
help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
print("Got userdata for %s" % args.username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
<commit_msg>Use getpass to get password<commit_after>
|
"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', help='Ldap login with this username')
parser.add_argument('-p', '--password', help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(username, password)
print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', required=True,
help='Ldap login with this username')
parser.add_argument('-p', '--password', required=True,
help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
print("Got userdata for %s" % args.username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
Use getpass to get password"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', help='Ldap login with this username')
parser.add_argument('-p', '--password', help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(username, password)
print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
<commit_before>"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', required=True,
help='Ldap login with this username')
parser.add_argument('-p', '--password', required=True,
help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
print("Got userdata for %s" % args.username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
<commit_msg>Use getpass to get password<commit_after>"""
Check that application ldap creds are set up correctly.
"""
from argparse import ArgumentParser
from pprint import pprint
import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
        help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', help='Ldap login with this username')
parser.add_argument('-p', '--password', help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(username, password)
print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
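The check-script change above makes -u/-p optional and falls back to interactive prompts, which keeps passwords out of shell history and process listings. A minimal sketch of the fallback (Python 2, matching the record; raw_input is spelled input on Python 3):

import getpass
from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-u', '--username')  # no longer required
parser.add_argument('-p', '--password')
args = parser.parse_args()

username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()  # prompt is never echoed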
f735b7801b68f44a40d5aa2068213ffe94f5a0b9
|
polling_stations/apps/data_collection/management/commands/import_high_peak.py
|
polling_stations/apps/data_collection/management/commands/import_high_peak.py
|
from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
|
from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
#'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
|
Remove High Peak election id (complaint from user)
|
Remove High Peak election id (complaint from user)
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations
|
from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
Remove High Peak election id (complaint from user)
|
from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
#'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
|
<commit_before>from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
<commit_msg>Remove High Peak election id (complaint from user)<commit_after>
|
from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
#'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
|
from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
Remove High Peak election id (complaint from user)from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
#'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
|
<commit_before>from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
<commit_msg>Remove High Peak election id (complaint from user)<commit_after>from data_collection.management.commands import BaseShpStationsShpDistrictsImporter
class Command(BaseShpStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000037'
districts_name = 'High Peak Polling Districts'
stations_name = 'High Peak Polling Districts.shp'
elections = [
'local.derbyshire.2017-05-04',
#'parl.2017-06-08'
]
def district_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'name': name,
'polling_station_id': code,
}
def station_record_to_dict(self, record):
name = str(record[0]).strip()
# codes are embedded in the name string: extract them
code = name[name.find("(")+1:name.find(")")].strip()
return {
'internal_council_id': code,
'postcode': '',
'address': str(record[1]).strip(),
'location': None,
}
|
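Besides commenting out the election id, the High Peak importer above leans on a string-slicing idiom to pull a code out of a parenthesised name. A sketch with a hypothetical record name, plus a caveat worth knowing:

name = 'Some District (AB1)'
code = name[name.find("(") + 1:name.find(")")].strip()
print(code)  # AB1

# Caveat: str.find() returns -1 when a parenthesis is missing, so the slice
# silently yields a wrong substring instead of failing; a regex such as
# re.search(r'\((.*?)\)', name) makes the missing-code case explicit.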
7ed188bcaf38a25fb63fbb1ed3b070428ff95759
|
setuptools/tests/test_setopt.py
|
setuptools/tests/test_setopt.py
|
# coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=йарацо')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'йарацо'
assert parser.get('names', 'other') == 'yes'
|
# coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=джарако')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'джарако'
assert parser.get('names', 'other') == 'yes'
|
Correct Cyrillic to match preferred pronunciation.
|
Correct Cyrillic to match preferred pronunciation.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
# coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=йарацо')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'йарацо'
assert parser.get('names', 'other') == 'yes'
Correct Cyrillic to match preferred pronunciation.
|
# coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=джарако')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'джарако'
assert parser.get('names', 'other') == 'yes'
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=йарацо')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'йарацо'
assert parser.get('names', 'other') == 'yes'
<commit_msg>Correct Cyrillic to match preferred pronunciation.<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=джарако')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'джарако'
assert parser.get('names', 'other') == 'yes'
|
# coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=йарацо')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'йарацо'
assert parser.get('names', 'other') == 'yes'
Correct Cyrillic to match preferred pronunciation.# coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=джарако')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'джарако'
assert parser.get('names', 'other') == 'yes'
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=йарацо')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'йарацо'
assert parser.get('names', 'other') == 'yes'
<commit_msg>Correct Cyrillic to match preferred pronunciation.<commit_after># coding: utf-8
from __future__ import unicode_literals
import io
import six
from setuptools.command import setopt
from setuptools.extern.six.moves import configparser
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with io.open(filename, encoding='utf-8') as reader:
(parser.read_file if six.PY3 else parser.readfp)(reader)
return parser
@staticmethod
def write_text(file, content):
with io.open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=джарако')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'джарако'
assert parser.get('names', 'other') == 'yes'
|
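The setopt test above pins down a UTF-8 round trip through configparser. A Python 3-only sketch of the same round trip (the record goes through six so Python 2's readfp also works); the file name and values here are hypothetical:

import io
import configparser

with io.open('demo.cfg', 'w', encoding='utf-8') as f:
    f.write(u'[names]\nexample=\u0434\u0436\u0430\u0440\u0430\u043a\u043e\n')

parser = configparser.ConfigParser()
with io.open('demo.cfg', encoding='utf-8') as f:
    parser.read_file(f)
print(parser.get('names', 'example'))  # the non-ASCII value survives intact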
1d74ba63dda5193a5287a45c9570a7c2ece6fb42
|
moksha/apps/metrics/moksha/apps/metrics/consumers/metrics_consumer.py
|
moksha/apps/metrics/moksha/apps/metrics/consumers/metrics_consumer.py
|
# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['topic'], message['data'])
|
# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['body']['topic'], message['body']['data'])
|
Fix the data format of our metrics consumer
|
Fix the data format of our metrics consumer
|
Python
|
apache-2.0
|
mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha,ralphbean/moksha,ralphbean/moksha,pombredanne/moksha,pombredanne/moksha,lmacken/moksha,lmacken/moksha,ralphbean/moksha,lmacken/moksha,mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha
|
# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['topic'], message['data'])
Fix the data format of our metrics consumer
|
# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['body']['topic'], message['body']['data'])
|
<commit_before># This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['topic'], message['data'])
<commit_msg>Fix the data format of our metrics consumer<commit_after>
|
# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['body']['topic'], message['body']['data'])
|
# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['topic'], message['data'])
Fix the data format of our metrics consumer# This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['body']['topic'], message['body']['data'])
|
<commit_before># This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['topic'], message['data'])
<commit_msg>Fix the data format of our metrics consumer<commit_after># This file is part of Moksha.
# Copyright (C) 2008-2009 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Moksha Metrics Consumer
=======================
.. moduleauthor:: Luke Macken <lmacken@redhat.com>
"""
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['body']['topic'], message['body']['data'])
|
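The moksha fix above is a one-line change in where the payload lives: the hub delivers messages whose topic and data sit under a 'body' key, one level deeper than the old code assumed. A trivial sketch with a hypothetical message:

message = {'body': {'topic': 'some.topic', 'data': {'value': 1}}}
# Old (broken) lookups: message['topic'] / message['data'] -> KeyError.
topic = message['body']['topic']
data = message['body']['data']
print(topic, data)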
8257a9478fcf30c28bce91a8f12b63d8e1dab955
|
readinglist2pocket.py
|
readinglist2pocket.py
|
#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = ""
redirect_uri = ""
# Initialize Pocket API
request_token = Pocket.get_request_token(consumer_key=consumer_key, redirect_uri=redirect_uri)
auth_url = Pocket.get_auth_url(code=request_token, redirect_uri=redirect_uri)
user_credentials = Pocket.get_credentials(consumer_key=consumer_key, code=request_token)
access_token = user_credentials['access_token']
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
(add_status, add_message) = Pocket.add(article['url'].encode('utf-8'), title=article['title'].encode('utf-8'), tags='reading_list')
if 200 == add_status:
if args.verbose:
print article['url'].encode('utf-8')
else:
print >> sys.stderr, add_message
ap.exit(-1)
|
#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = "" # Insert your consumer key here (https://getpocket.com/developer/apps/)
redirect_uri = "" # TODO: Currently obselete/phishing threat in this version
# Manually trigger pocket authentication
access_token = Pocket.auth(consumer_key=consumer_key, redirect_uri=redirect_uri)
pocket_instance = Pocket(consumer_key, access_token)
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
print pocket_instance.bulk_add(url=article['url'].encode('utf-8'), tags='reading_list')
print "Added:", article['url']
# commit bulk_add changes
pocket_instance.commit()
|
Add functional Pocket auth and bulk_add
|
Add functional Pocket auth and bulk_add
|
Python
|
mit
|
anoved/ReadingListReader
|
#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = ""
redirect_uri = ""
# Initialize Pocket API
request_token = Pocket.get_request_token(consumer_key=consumer_key, redirect_uri=redirect_uri)
auth_url = Pocket.get_auth_url(code=request_token, redirect_uri=redirect_uri)
user_credentials = Pocket.get_credentials(consumer_key=consumer_key, code=request_token)
access_token = user_credentials['access_token']
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
(add_status, add_message) = Pocket.add(article['url'].encode('utf-8'), title=article['title'].encode('utf-8'), tags='reading_list')
if 200 == add_status:
if args.verbose:
print article['url'].encode('utf-8')
else:
print >> sys.stderr, add_message
ap.exit(-1)
Add functional Pocket auth and bulk_add
|
#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = "" # Insert your consumer key here (https://getpocket.com/developer/apps/)
redirect_uri = "" # TODO: Currently obselete/phishing threat in this version
# Manually trigger pocket authentication
access_token = Pocket.auth(consumer_key=consumer_key, redirect_uri=redirect_uri)
pocket_instance = Pocket(consumer_key, access_token)
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
print pocket_instance.bulk_add(url=article['url'].encode('utf-8'), tags='reading_list')
print "Added:", article['url']
# commit bulk_add changes
pocket_instance.commit()
|
<commit_before>#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = ""
redirect_uri = ""
# Initialize Pocket API
request_token = Pocket.get_request_token(consumer_key=consumer_key, redirect_uri=redirect_uri)
auth_url = Pocket.get_auth_url(code=request_token, redirect_uri=redirect_uri)
user_credentials = Pocket.get_credentials(consumer_key=consumer_key, code=request_token)
access_token = user_credentials['access_token']
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
(add_status, add_message) = Pocket.add(article['url'].encode('utf-8'), title=article['title'].encode('utf-8'), tags='reading_list')
if 200 == add_status:
if args.verbose:
print article['url'].encode('utf-8')
else:
print >> sys.stderr, add_message
ap.exit(-1)
<commit_msg>Add functional Pocket auth and bulk_add<commit_after>
|
#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = "" # Insert your consumer key here (https://getpocket.com/developer/apps/)
redirect_uri = "" # TODO: Currently obselete/phishing threat in this version
# Manually trigger pocket authentication
access_token = Pocket.auth(consumer_key=consumer_key, redirect_uri=redirect_uri)
pocket_instance = Pocket(consumer_key, access_token)
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
print pocket_instance.bulk_add(url=article['url'].encode('utf-8'), tags='reading_list')
print "Added:", article['url']
# commit bulk_add changes
pocket_instance.commit()
|
#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = ""
redirect_uri = ""
# Initialize Pocket API
request_token = Pocket.get_request_token(consumer_key=consumer_key, redirect_uri=redirect_uri)
auth_url = Pocket.get_auth_url(code=request_token, redirect_uri=redirect_uri)
user_credentials = Pocket.get_credentials(consumer_key=consumer_key, code=request_token)
access_token = user_credentials['access_token']
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
(add_status, add_message) = Pocket.add(article['url'].encode('utf-8'), title=article['title'].encode('utf-8'), tags='reading_list')
if 200 == add_status:
if args.verbose:
print article['url'].encode('utf-8')
else:
print >> sys.stderr, add_message
ap.exit(-1)
Add functional Pocket auth and bulk_add
#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = "" # Insert your consumer key here (https://getpocket.com/developer/apps/)
redirect_uri = "" # TODO: Currently obselete/phishing threat in this version
# Manually trigger pocket authentication
access_token = Pocket.auth(consumer_key=consumer_key, redirect_uri=redirect_uri)
pocket_instance = Pocket(consumer_key, access_token)
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
print pocket_instance.bulk_add(url=article['url'].encode('utf-8'), tags='reading_list')
print "Added:", article['url']
# commit bulk_add changes
pocket_instance.commit()
|
<commit_before>#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = ""
redirect_uri = ""
# Initialize Pocket API
request_token = Pocket.get_request_token(consumer_key=consumer_key, redirect_uri=redirect_uri)
auth_url = Pocket.get_auth_url(code=request_token, redirect_uri=redirect_uri)
user_credentials = Pocket.get_credentials(consumer_key=consumer_key, code=request_token)
access_token = user_credentials['access_token']
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
(add_status, add_message) = Pocket.add(article['url'].encode('utf-8'), title=article['title'].encode('utf-8'), tags='reading_list')
if 200 == add_status:
if args.verbose:
print article['url'].encode('utf-8')
else:
print >> sys.stderr, add_message
ap.exit(-1)
<commit_msg>Add functional Pocket auth and bulk_add<commit_after>#!/usr/bin/env python
# Requires https://github.com/samuelkordik/pocketlib
from readinglistlib import ReadingListReader
from pocket.pocket import Pocket
import argparse
import sys
# Configure and consume command line arguments.
ap = argparse.ArgumentParser(description='This script adds your Safari Reading List articles to Pocket.')
ap.add_argument('-v', '--verbose', action='store_true', help='Print article URLs as they are added.')
args = ap.parse_args()
consumer_key = "" # Insert your consumer key here (https://getpocket.com/developer/apps/)
redirect_uri = "" # TODO: Currently obselete/phishing threat in this version
# Manually trigger pocket authentication
access_token = Pocket.auth(consumer_key=consumer_key, redirect_uri=redirect_uri)
pocket_instance = Pocket(consumer_key, access_token)
# Get the Reading List items
rlr = ReadingListReader()
articles = rlr.read(show="unread")
for article in articles:
print pocket_instance.bulk_add(url=article['url'].encode('utf-8'), tags='reading_list')
print "Added:", article['url']
# commit bulk_add changes
pocket_instance.commit()
|
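A side note on the Pocket script above: it queues one bulk_add per article and flushes everything with a single commit. The batch-then-commit pattern in isolation, using a hypothetical stub client (the pocketlib fork's real signatures may differ):
```
class StubPocket(object):
    """Hypothetical stand-in for the pocketlib client used above."""
    def __init__(self):
        self._pending = []

    def bulk_add(self, url, tags=None):
        # Queue the action locally; nothing hits the network yet.
        self._pending.append({"url": url, "tags": tags})

    def commit(self):
        # A real client would flush every queued action in one request.
        sent, self._pending = self._pending, []
        return sent

client = StubPocket()
for url in ("https://example.com/a", "https://example.com/b"):
    client.bulk_add(url, tags="reading_list")
print(client.commit())  # both adds flushed by a single commit
```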
8ddb8217759a18874b9b147cbe77a0103556251e
|
order/order_2_login_system_by_https.py
|
order/order_2_login_system_by_https.py
|
import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'https://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
Order 2: Login system by https
|
[Order] Order 2: Login system by https
|
Python
|
mit
|
flyingSprite/spinelle
|
import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
[Order] Order 2: Login system by https
|
import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'https://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
<commit_before>import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
<commit_msg>[Order] Order 2: Login system by https<commit_after>
|
import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'https://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
[Order] Order 2: Login system by https
import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'https://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
<commit_before>import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
<commit_msg>[Order] Order 2: Login system by https<commit_after>import json
import requests
""" Order 2: Login system by https
This is the curl command that logs in to the system:
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'https://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict payload must be serialized to a str with json.dumps().
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
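Two hardening notes on the login snippet above, offered as a sketch rather than a drop-in replacement: requests can serialize the payload itself via json=, and verify=False is safer when the warning it triggers is silenced deliberately instead of ignored:
```
import requests
import urllib3

# verify=False disables TLS certificate checks; acknowledge that choice
# explicitly instead of letting the warning scroll by.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

def login(base_url, username, password):
    # json= serializes the dict and sets the Content-Type header,
    # replacing the manual json.dumps(payload) + headers dance.
    response = requests.post(
        base_url + "/axapi/v3/auth",
        json={"credentials": {"username": username, "password": password}},
        verify=False,
        timeout=10,
    )
    response.raise_for_status()
    return response.json()
```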
c5d0595acb080bdc33efdc95a5781ed6b87b0a2e
|
warehouse/packages/models.py
|
warehouse/packages/models.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
Refactor Project to use new mixins and methods
|
Refactor Project to use new mixins and methods
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
Refactor Project to use new mixins and methods
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
<commit_msg>Refactor Project to use new mixins and methods<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
Refactor Project to use new mixins and methods
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy import event
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from warehouse import db
from warehouse.databases.mixins import UUIDPrimaryKeyMixin
from warehouse.database.types import CIText
class Project(UUIDPrimaryKeyMixin, db.Model):
__tablename__ = "projects"
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
# Create the Trigger to fill in normalized (TODO: Move this to Alembic?)
event.listen(Project.__table__, "after_create", db.DDL("""
CREATE OR REPLACE FUNCTION normalize_name() RETURNS trigger AS
$body$
BEGIN
new.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
RETURN new;
end;
$body$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""))
<commit_msg>Refactor Project to use new mixins and methods<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from sqlalchemy.schema import FetchedValue
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.declarative import declared_attr
from warehouse import db
from warehouse.database.mixins import UUIDPrimaryKeyMixin, TimeStampedMixin
from warehouse.database.schema import TableDDL
from warehouse.database.types import CIText
from warehouse.database.utils import table_args
class Project(UUIDPrimaryKeyMixin, TimeStampedMixin, db.Model):
__tablename__ = "projects"
__table_args__ = declared_attr(table_args((
TableDDL("""
CREATE OR REPLACE FUNCTION normalize_name()
RETURNS trigger AS $$
BEGIN
NEW.normalized = lower(regexp_replace(new.name, '[^A-Za-z0-9.]+', '-'));
return NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_normalize_name
BEFORE INSERT OR UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE normalize_name();
"""),
)))
name = db.Column(CIText, unique=True, nullable=False)
normalized = db.Column(CIText, unique=True, nullable=False,
server_default=FetchedValue(),
server_onupdate=FetchedValue())
def __init__(self, name):
self.name = name
def __repr__(self):
return "<Project: {name}>".format(name=self.name)
|
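The DDL above is the source of truth for name normalization, but the same rule is easy to mirror in Python when you need to predict what the trigger will store -- a sketch:
```
import re

def normalize_name(name):
    # Mirror of the plpgsql trigger: collapse every run of characters
    # that is not a letter, digit, or dot into one hyphen, then lowercase.
    return re.sub(r"[^A-Za-z0-9.]+", "-", name).lower()

assert normalize_name("Warehouse.Core") == "warehouse.core"
assert normalize_name("My_Cool  Project") == "my-cool-project"
```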
f44bd61809d2d965359ad4795b3839aa9a56bfec
|
src/sentry/monkey.py
|
src/sentry/monkey.py
|
from __future__ import absolute_import
def register_scheme(name):
from six.moves.urllib import parse as urlparse
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
|
from __future__ import absolute_import
def register_scheme(name):
try:
import urlparse # NOQA
except ImportError:
from urllib import parse as urlparse # NOQA
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
|
Remove six.moves and disable linter
|
Remove six.moves and disable linter
|
Python
|
bsd-3-clause
|
gencer/sentry,ifduyue/sentry,jean/sentry,looker/sentry,jean/sentry,gencer/sentry,mvaled/sentry,JamesMura/sentry,JamesMura/sentry,JamesMura/sentry,jean/sentry,BuildingLink/sentry,looker/sentry,looker/sentry,beeftornado/sentry,JackDanger/sentry,BuildingLink/sentry,JackDanger/sentry,jean/sentry,mvaled/sentry,beeftornado/sentry,jean/sentry,beeftornado/sentry,gencer/sentry,ifduyue/sentry,mvaled/sentry,looker/sentry,BuildingLink/sentry,JackDanger/sentry,BuildingLink/sentry,BuildingLink/sentry,JamesMura/sentry,ifduyue/sentry,mvaled/sentry,gencer/sentry,looker/sentry,mvaled/sentry,mvaled/sentry,JamesMura/sentry,ifduyue/sentry,ifduyue/sentry,gencer/sentry
|
from __future__ import absolute_import
def register_scheme(name):
from six.moves.urllib import parse as urlparse
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
Remove six.moves and disable linter
|
from __future__ import absolute_import
def register_scheme(name):
try:
import urlparse # NOQA
except ImportError:
from urllib import parse as urlparse # NOQA
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
|
<commit_before>from __future__ import absolute_import
def register_scheme(name):
from six.moves.urllib import parse as urlparse
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
<commit_msg>Remove six.moves and disable linter<commit_after>
|
from __future__ import absolute_import
def register_scheme(name):
try:
import urlparse # NOQA
except ImportError:
from urllib import parse as urlparse # NOQA
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
|
from __future__ import absolute_import
def register_scheme(name):
from six.moves.urllib import parse as urlparse
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
Remove six.moves and disable linter
from __future__ import absolute_import
def register_scheme(name):
try:
import urlparse # NOQA
except ImportError:
from urllib import parse as urlparse # NOQA
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
|
<commit_before>from __future__ import absolute_import
def register_scheme(name):
from six.moves.urllib import parse as urlparse
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
<commit_msg>Remove six.moves and disable linter<commit_after>from __future__ import absolute_import
def register_scheme(name):
try:
import urlparse # NOQA
except ImportError:
from urllib import parse as urlparse # NOQA
uses = urlparse.uses_netloc, urlparse.uses_query, urlparse.uses_relative, urlparse.uses_fragment
for use in uses:
if name not in use:
use.append(name)
register_scheme('app')
|
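What register_scheme above actually buys: until a scheme appears in urlparse's uses_* lists, URL joining falls back to generic handling. A quick illustration with Python 3 names (the module above also supports Python 2, hence its try/except import):
```
from urllib import parse as urlparse

# Before registration, 'app' URLs are not relative-joinable:
print(urlparse.urljoin("app://host/a/b", "c"))  # -> 'c'

for use in (urlparse.uses_netloc, urlparse.uses_query,
            urlparse.uses_relative, urlparse.uses_fragment):
    if "app" not in use:
        use.append("app")

# After registration, joins resolve the way they do for http:
print(urlparse.urljoin("app://host/a/b", "c"))  # -> 'app://host/a/c'
```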
87a4c494c18039a296775dab8acf910f83fb59b8
|
djangoappengine/utils.py
|
djangoappengine/utils.py
|
from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
try:
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
except ValueError:
# https://bitbucket.org/wkornewald/django-nonrel/issue/13/managepy-test-broken-with-gae-sdk-16
appconfig, unused, from_cache = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
Make prosthetic-runner work with GAE SDK 1.6
|
Make prosthetic-runner work with GAE SDK 1.6
|
Python
|
mit
|
philterphactory/prosthetic-runner,philterphactory/prosthetic-runner,philterphactory/prosthetic-runner
|
from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
Make prosthetic-runner work with GAE SDK 1.6
|
from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
try:
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
except ValueError:
# https://bitbucket.org/wkornewald/django-nonrel/issue/13/managepy-test-broken-with-gae-sdk-16
appconfig, unused, from_cache = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
<commit_before>from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
<commit_msg>Make prosthetic-runner work with GAE SDK 1.6<commit_after>
|
from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
try:
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
except ValueError:
# https://bitbucket.org/wkornewald/django-nonrel/issue/13/managepy-test-broken-with-gae-sdk-16
appconfig, unused, from_cache = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
Make prosthetic-runner work with GAE SDK 1.6
from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
try:
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
except ValueError:
# https://bitbucket.org/wkornewald/django-nonrel/issue/13/managepy-test-broken-with-gae-sdk-16
appconfig, unused, from_cache = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
<commit_before>from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
<commit_msg>Make prosthetic-runner work with GAE SDK 1.6<commit_after>from google.appengine.api import apiproxy_stub_map
import os
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
appid = os.environ.get('APPLICATION_ID')
else:
try:
from google.appengine.tools import dev_appserver
from .boot import PROJECT_DIR
try:
appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
except ValueError:
# https://bitbucket.org/wkornewald/django-nonrel/issue/13/managepy-test-broken-with-gae-sdk-16
appconfig, unused, from_cache = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
appid = appconfig.application
except ImportError, e:
raise Exception('Could not get appid. Is your app.yaml file missing? '
'Error was: %s' % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
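The GAE fix above is an instance of a reusable pattern: when an upstream call changes its return arity between releases, probing with try/except ValueError beats sniffing version numbers. Isolated here with hypothetical stand-in loaders:
```
def load_config(loader, project_dir):
    # Unpacking a 3-tuple into two names raises ValueError, which is the
    # signal that we are on the newer API returning an extra flag.
    try:
        appconfig, _unused = loader(project_dir, {})
    except ValueError:
        appconfig, _unused, _from_cache = loader(project_dir, {})
    return appconfig

# Stand-ins for the two SDK generations (for illustration only):
old_sdk = lambda path, cache: ("config", None)
new_sdk = lambda path, cache: ("config", None, False)
assert load_config(old_sdk, ".") == "config"
assert load_config(new_sdk, ".") == "config"
```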
076e0a765958101f22acc04f313895dc67fdbc9f
|
tests/test_project/settings.py
|
tests/test_project/settings.py
|
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
|
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'mongoengine.django.mongo_auth',
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
|
Set Mongoengine user for Django 1.5 in tests.
|
Set Mongoengine user for Django 1.5 in tests.
|
Python
|
agpl-3.0
|
wlanslovenija/django-tastypie-mongoengine
|
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
Set Mongoengine user for Django 1.5 in tests.
|
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'mongoengine.django.mongo_auth',
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
|
<commit_before># Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
<commit_msg>Set Mongoengine user for Django 1.5 in tests.<commit_after>
|
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'mongoengine.django.mongo_auth',
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
|
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
Set Mongoengine user for Django 1.5 in tests.
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'mongoengine.django.mongo_auth',
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
|
<commit_before># Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
<commit_msg>Set Mongoengine user for Django 1.5 in tests.<commit_after># Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because the flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'mongoengine.django.mongo_auth',
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
|
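Context for the AUTH_USER_MODEL line above: Django 1.5 resolves the user model lazily from an 'app_label.ModelName' string, so application code should look it up instead of importing a class. Standard Django usage, sketched:
```
from django.conf import settings
from django.contrib.auth import get_user_model

def make_user(username):
    # Resolves whatever AUTH_USER_MODEL names -- 'mongo_auth.MongoUser'
    # in the settings above -- without importing it directly.
    User = get_user_model()
    return User.objects.create(username=username)

# Foreign keys should reference the setting, not a concrete class:
# author = models.ForeignKey(settings.AUTH_USER_MODEL)
```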
0e1c9c09bdf60fce3e9dbb8051db079687709fe0
|
blah/commands.py
|
blah/commands.py
|
import os
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False)
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
|
import os
import argparse
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False, help=argparse.SUPPRESS, action="store_true")
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
|
Fix and hide --use-cache option
|
Fix and hide --use-cache option
|
Python
|
bsd-2-clause
|
mwilliamson/mayo
|
import os
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False)
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
Fix and hide --use-cache option
|
import os
import argparse
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False, help=argparse.SUPPRESS, action="store_true")
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
|
<commit_before>import os
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False)
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
<commit_msg>Fix and hide --use-cache option<commit_after>
|
import os
import argparse
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False, help=argparse.SUPPRESS, action="store_true")
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
|
import os
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False)
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
Fix and hide --use-cache optionimport os
import argparse
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False, help=argparse.SUPPRESS, action="store_true")
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
|
<commit_before>import os
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False)
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
<commit_msg>Fix and hide --use-cache option<commit_after>import os
import argparse
import blah.repositories
import blah.fetcher
class WhatIsThisCommand(object):
def create_parser(self, subparser):
subparser.add_argument("directory", nargs="?")
def execute(self, args):
directory = args.directory if args.directory is not None else os.getcwd()
repository = blah.repositories.find_repository(directory)
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.working_directory)
what_is_this_command = WhatIsThisCommand()
class FetchCommand(object):
def create_parser(self, subparser):
subparser.add_argument("repository_uri", metavar="repository-uri")
subparser.add_argument("local_path", metavar="local-path")
subparser.add_argument("--use-cache", default=False, help=argparse.SUPPRESS, action="store_true")
def execute(self, args):
blah.fetcher.fetch(args.repository_uri, args.local_path, args.use_cache)
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command,
"fetch": FetchCommand()
}
|
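The behavioural difference in this record is easy to reproduce in isolation. Without an action, argparse treats --use-cache as an option that consumes a value, so the bare flag is an error and any supplied value is stored as a truthy string; action="store_true" fixes that, and help=argparse.SUPPRESS hides the option from the help text. A small standard-library sketch:

import argparse

parser = argparse.ArgumentParser(prog="blah")
parser.add_argument("--use-cache", default=False,
                    help=argparse.SUPPRESS, action="store_true")

print(parser.parse_args([]).use_cache)               # False
print(parser.parse_args(["--use-cache"]).use_cache)  # True
parser.print_help()  # --use-cache is absent from the printed help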
59e454f0272725c46d06f3d5f32edafa866f578b
|
registration/admin.py
|
registration/admin.py
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
|
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
|
Python
|
bsd-3-clause
|
remarkablerocket/django-mailinglist-registration,remarkablerocket/django-mailinglist-registration
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
<commit_before>from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
<commit_msg>Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.<commit_after>
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
<commit_before>from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
<commit_msg>Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.<commit_after>from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
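For context: the admin's default ForeignKey widget renders a <select> populated with every User row, while raw_id_fields swaps in a plain primary-key input plus a lookup popup, which stays cheap on sites with huge user tables. A hedged inspection sketch (assumes a configured Django project with this admin module loaded; admin.site._registry is an internal detail):

from django.contrib import admin
from registration.models import RegistrationProfile

profile_admin = admin.site._registry[RegistrationProfile]  # internal registry
print(profile_admin.raw_id_fields)  # ['user']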
03b180bf1dad2f7f82dec177b1fece369bdcf5e6
|
build/oggm/run_test.py
|
build/oggm/run_test.py
|
#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import pytest_mpl.plugin
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl'], plugins=[pytest_mpl.plugin]))
|
#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl']))
|
Revert "Try to fix mpl invocation"
|
Revert "Try to fix mpl invocation"
This reverts commit c5f0e32eb12ae7809b9fde0371bfb73ec86d47a3.
|
Python
|
mit
|
OGGM/OGGM-Anaconda
|
#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import pytest_mpl.plugin
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl'], plugins=[pytest_mpl.plugin]))
Revert "Try to fix mpl invocation"
This reverts commit c5f0e32eb12ae7809b9fde0371bfb73ec86d47a3.
|
#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl']))
|
<commit_before>#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import pytest_mpl.plugin
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl'], plugins=[pytest_mpl.plugin]))
<commit_msg>Revert "Try to fix mpl invocation"
This reverts commit c5f0e32eb12ae7809b9fde0371bfb73ec86d47a3.<commit_after>
|
#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl']))
|
#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import pytest_mpl.plugin
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl'], plugins=[pytest_mpl.plugin]))
Revert "Try to fix mpl invocation"
This reverts commit c5f0e32eb12ae7809b9fde0371bfb73ec86d47a3.#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl']))
|
<commit_before>#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import pytest_mpl.plugin
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl'], plugins=[pytest_mpl.plugin]))
<commit_msg>Revert "Try to fix mpl invocation"
This reverts commit c5f0e32eb12ae7809b9fde0371bfb73ec86d47a3.<commit_after>#!/usr/bin/env python
import os
os.environ["MPLBACKEND"] = 'agg'
import matplotlib
matplotlib.use('agg')
import pytest
import oggm
import sys
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
initial_dir = os.getcwd()
oggm_file = os.path.abspath(oggm.__file__)
oggm_dir = os.path.dirname(oggm_file)
sys.exit(pytest.main([oggm_dir, '--mpl']))
|
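The revert relies on pytest discovering installed plugins through their setuptools entry points: an installed pytest-mpl registers itself automatically, so --mpl is understood without passing plugins=[...], and handing the module in explicitly would register it a second time. A hedged sketch (assumes pytest and pytest-mpl are installed; the test path is a placeholder):

import sys
import pytest

# pytest-mpl auto-loads via its entry point, so --mpl alone is enough:
sys.exit(pytest.main(["tests/", "--mpl"]))  # "tests/" is hypothetical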
695b7cabdc46f3f90b116fa63380bff2ecbfab0c
|
json_settings/__init__.py
|
json_settings/__init__.py
|
import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings():
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
settings_file = globals().get('JSON_SETTINGS_FILE', 'settings.json')
env_settings = os.path.join(sys.prefix, "etc", settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
patch_settings()
|
import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings(json_settings_file):
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
env_settings = os.path.join(sys.prefix, "etc", json_settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
|
Patch to make work like we expect
|
Patch to make work like we expect
|
Python
|
apache-2.0
|
coolshop-com/django-json-settings
|
import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings():
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
settings_file = globals().get('JSON_SETTINGS_FILE', 'settings.json')
env_settings = os.path.join(sys.prefix, "etc", settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
patch_settings()
Patch to make work like we expect
|
import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings(json_settings_file):
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
env_settings = os.path.join(sys.prefix, "etc", json_settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
|
<commit_before>import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings():
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
settings_file = globals().get('JSON_SETTINGS_FILE', 'settings.json')
env_settings = os.path.join(sys.prefix, "etc", settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
patch_settings()
<commit_msg>Patch to make work like we expect<commit_after>
|
import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings(json_settings_file):
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
env_settings = os.path.join(sys.prefix, "etc", json_settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
|
import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings():
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
settings_file = globals().get('JSON_SETTINGS_FILE', 'settings.json')
env_settings = os.path.join(sys.prefix, "etc", settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
patch_settings()
Patch to make work like we expectimport json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings(json_settings_file):
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
env_settings = os.path.join(sys.prefix, "etc", json_settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
|
<commit_before>import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings():
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
settings_file = globals().get('JSON_SETTINGS_FILE', 'settings.json')
env_settings = os.path.join(sys.prefix, "etc", settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
patch_settings()
<commit_msg>Patch to make work like we expect<commit_after>import json
import logging
import os
import sys
def json_patch(path):
logging.warn("Attempting to load local settings from %r" % (path,))
try:
d = json.load(open(path))
except IOError:
logging.exception("Unable to open json settings in %r" % (path,))
raise SystemExit(-1)
except ValueError:
logging.exception("Unable to parse json settings in %r" % (path,))
raise SystemExit(-1)
for k, v in d.items():
globals()[k] = v
def patch_settings(json_settings_file):
env_settings = os.environ.get('JSON_SETTINGS', None)
if env_settings is None:
# We only use the default if it exists
env_settings = os.path.join(sys.prefix, "etc", json_settings_file)
if not os.path.exists(env_settings):
return
json_patch(env_settings)
if "VAR_DIRECTORY" not in globals():
globals()["VAR_DIRECTORY"] = os.path.join(sys.prefix, "var")
if "STATIC_ROOT" not in globals():
globals()["STATIC_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "static")
if "MEDIA_ROOT" not in globals():
globals()["MEDIA_ROOT"] = os.path.join(
globals()["VAR_DIRECTORY"], "media")
|
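After this change, importing json_settings no longer patches anything at import time; the caller chooses the file name and triggers the patch. Since json_patch writes into the json_settings module's own globals(), a caller would typically re-export the patched names afterwards. A hedged usage sketch (the file name is illustrative):

# in a hypothetical Django settings.py
import json_settings

json_settings.patch_settings("settings.json")  # $JSON_SETTINGS still wins if set
from json_settings import *                    # re-export the patched values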
df9124765a53379626f516ca87e2a19678cd31b6
|
pm/utils/filesystem.py
|
pm/utils/filesystem.py
|
""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB][A-Z\d]{9}'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
|
""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d]'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
|
Modify regexp to pick up MiSeqs as well. Thanks @senthil10
|
Modify regexp to pick up MiSeqs as well. Thanks @senthil10
|
Python
|
mit
|
vezzi/TACA,guillermo-carrasco/TACA,vezzi/TACA,senthil10/TACA,kate-v-stepanova/TACA,senthil10/TACA,kate-v-stepanova/TACA,SciLifeLab/TACA,guillermo-carrasco/TACA,SciLifeLab/TACA,b97pla/TACA,SciLifeLab/TACA,b97pla/TACA
|
""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB][A-Z\d]{9}'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
Modify regexp to pick up MiSeqs as well. Thanks @senthil10
|
""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d]'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
|
<commit_before>""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB][A-Z\d]{9}'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
<commit_msg>Modify regexp to pick up MiSeqs as well. Thanks @senthil10<commit_after>
|
""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d]'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
|
""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB][A-Z\d]{9}'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
Modify regexp to pick up MiSeqs as well. Thanks @senthil10""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d]'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
|
<commit_before>""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB][A-Z\d]{9}'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
<commit_msg>Modify regexp to pick up MiSeqs as well. Thanks @senthil10<commit_after>""" Filesystem utilities
"""
import contextlib
import os
RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d]'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory.
"""
cur_dir = os.getcwd()
# This is weird behavior. I'm removing and and we'll see if anything breaks.
#safe_makedir(new_dir)
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
|
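The widened character class is what admits MiSeq run names, whose flowcell part starts with a digit block like 000000000-A1CRJ rather than the HiSeq-style A/B prefix. A quick standard-library check (both run names are hypothetical; the patterns match the record, written as raw strings):

import re

RUN_RE = r'\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d]'      # new pattern
OLD_RE = r'\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB][A-Z\d]{9}'    # old pattern

hiseq = '140124_SN1025_0123_AH8TLGADXX'
miseq = '140124_M00485_0123_000000000-A1CRJ'
for run in (hiseq, miseq):
    print(run, bool(re.match(RUN_RE, run)))  # True for both
print(bool(re.match(OLD_RE, miseq)))         # False: old pattern misses MiSeq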
d6be1dfbd9124ed1c35c32a0819bbfa3d9e6759a
|
scripts/linux/cura.py
|
scripts/linux/cura.py
|
#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
|
#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
if module == 'OpenGL':
module = 'PyOpenGL'
elif module == 'serial':
module = 'pyserial'
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
exit(1)
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
|
Add py prefix to OpenGL and serial. Exit when error.
|
Add py prefix to OpenGL and serial. Exit when error.
|
Python
|
agpl-3.0
|
alephobjects/Cura,alephobjects/Cura,alephobjects/Cura
|
#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
Add py prefix to OpenGL and serial. Exit when error.
|
#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
if module == 'OpenGL':
module = 'PyOpenGL'
elif module == 'serial':
module = 'pyserial'
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
exit(1)
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
|
<commit_before>#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
<commit_msg>Add py prefix to OpenGL and serial. Exit when error.<commit_after>
|
#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
if module == 'OpenGL':
module = 'PyOpenGL'
elif module == 'serial':
module = 'pyserial'
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
exit(1)
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
|
#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
Add py prefix to OpenGL and serial. Exit when error.#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
if module == 'OpenGL':
module = 'PyOpenGL'
elif module == 'serial':
module = 'pyserial'
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
exit(1)
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
|
<commit_before>#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
<commit_msg>Add py prefix to OpenGL and serial. Exit when error.<commit_after>#!/usr/bin/python
import os, sys
try:
import OpenGL
import wx
import serial
import numpy
import power
except ImportError as e:
module = e.message.lstrip('No module named ')
if module == 'OpenGL':
module = 'PyOpenGL'
elif module == 'serial':
module = 'pyserial'
print 'Requires ' + module
if module == 'power':
print "Install from: https://github.com/GreatFruitOmsk/Power"
else:
print "Try sudo easy_install " + module
exit(1)
sys.path.insert(1, os.path.dirname(__file__))
import Cura.cura as cura
cura.main()
|
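One caveat worth knowing about the error parsing this record keeps: str.lstrip strips any of the given characters, not a literal prefix, so module names whose leading letters all occur in 'No module named ' get mangled. A standard-library illustration with a prefix-safe alternative:

msg = 'No module named numpy'
print(msg.lstrip('No module named '))  # prints 'py', not 'numpy'

prefix = 'No module named '
print(msg[len(prefix):] if msg.startswith(prefix) else msg)  # 'numpy'
# (on Python 3.9+, msg.removeprefix(prefix) does the same)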
f4ff7e557e1ca3409ebe64eafb723fad10d89812
|
coverage/version.py
|
coverage/version.py
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'alpha', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'beta', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
|
Make a beta of 4.2
|
Make a beta of 4.2
|
Python
|
apache-2.0
|
blueyed/coveragepy,nedbat/coveragepy,nedbat/coveragepy,nedbat/coveragepy,hugovk/coveragepy,blueyed/coveragepy,blueyed/coveragepy,nedbat/coveragepy,hugovk/coveragepy,hugovk/coveragepy,blueyed/coveragepy,nedbat/coveragepy,hugovk/coveragepy,blueyed/coveragepy,hugovk/coveragepy
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'alpha', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
Make a beta of 4.2
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'beta', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
|
<commit_before># Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'alpha', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
<commit_msg>Make a beta of 4.2<commit_after>
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'beta', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'alpha', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
Make a beta of 4.2# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'beta', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
|
<commit_before># Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'alpha', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
<commit_msg>Make a beta of 4.2<commit_after># Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""The version and URL for coverage.py"""
# This file is exec'ed in setup.py, don't import anything!
# Same semantics as sys.version_info.
version_info = (4, 2, 0, 'beta', 1)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
|
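Both helpers are pure functions of the version_info tuple, so the bump is easy to sanity-check (a hedged sketch assuming this coverage.py checkout is importable):

from coverage.version import __url__, __version__

print(__version__)  # 4.2b1
print(__url__)      # https://coverage.readthedocs.io/en/coverage-4.2b1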
56aba3ab7a23dd8bf322a9d577fa64e686dfc9ef
|
serrano/middleware.py
|
serrano/middleware.py
|
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
session = request.session
# Ensure the session is created view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
|
from .tokens import get_request_token
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
# Token-based authentication is attempting to be used, bypass CSRF
# check
if get_request_token(request):
request.csrf_processing_done = True
return
session = request.session
# Ensure the session is created view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions for non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
|
Update SessionMiddleware to bypass CSRF if request token is present
|
Update SessionMiddleware to bypass CSRF if request token is present
For non-session-based authentication, Serrano resources handle
authenticating using a token based approach. If it is present, CSRF
must be turned off to exempt the resources from the check.
|
Python
|
bsd-2-clause
|
rv816/serrano_night,chop-dbhi/serrano,chop-dbhi/serrano,rv816/serrano_night
|
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
session = request.session
# Ensure the session is created view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
Update SessionMiddleware to bypass CSRF if request token is present
For non-session-based authentication, Serrano resources handle
authenticating using a token based approach. If it is present, CSRF
must be turned off to exempt the resources from the check.
|
from .tokens import get_request_token
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
# Token-based authentication is attempting to be used, bypass CSRF
# check
if get_request_token(request):
request.csrf_processing_done = True
return
session = request.session
# Ensure the session is created view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions for non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
|
<commit_before>class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
session = request.session
# Ensure the session is created view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
<commit_msg>Update SessionMiddleware to bypass CSRF if request token is present
For non-session-based authentication, Serrano resources handle
authenticating using a token based approach. If it is present, CSRF
must be turned off to exempt the resources from the check.<commit_after>
|
from .tokens import get_request_token
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
# Token-based authentication is attempting to be used, bypass CSRF
# check
if get_request_token(request):
request.csrf_processing_done = True
return
session = request.session
# Ensure the session is created view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions for non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
|
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
session = request.session
# Ensure the session is created view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
Update SessionMiddleware to bypass CSRF if request token is present
For non-session-based authentication, Serrano resources handle
authenticating using a token based approach. If it is present, CSRF
must be turned off to exempt the resources from the check.
from .tokens import get_request_token
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
# Token-based authentication is attempting to be used, bypass CSRF
# check
if get_request_token(request):
request.csrf_processing_done = True
return
session = request.session
# Ensure the session is created before view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions for non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
|
<commit_before>class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
session = request.session
# Ensure the session is created before view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
<commit_msg>Update SessionMiddleware to bypass CSRF if request token is present
For non-session-based authentication, Serrano resources handle
authenticating using a token based approach. If it is present, CSRF
must be turned off to exempt the resources from the check.<commit_after>from .tokens import get_request_token
class SessionMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
return
# Token-based authentication is attempting to be used, bypass CSRF
# check
if get_request_token(request):
request.csrf_processing_done = True
return
session = request.session
# Ensure the session is created before view processing, but only if a cookie
# had been previously set. This is to prevent creating exorbitant
# numbers of sessions for non-browser clients, such as bots.
if session.session_key is None:
if session.test_cookie_worked():
session.delete_test_cookie()
request.session.create()
else:
session.set_test_cookie()
|
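A note on why the flag works: Django's CsrfViewMiddleware checks getattr(request, 'csrf_processing_done', False) before running its validation, so setting the flag here is the supported way to opt a request out — provided this middleware is listed before the CSRF middleware. A minimal ordering sketch, assuming the class is importable as serrano.middleware.SessionMiddleware (the import path is an assumption, not shown in this record):
# settings.py -- ordering sketch; the serrano.middleware path is assumed
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'serrano.middleware.SessionMiddleware',       # sets csrf_processing_done for token requests
    'django.middleware.csrf.CsrfViewMiddleware',  # sees the flag and skips its check
)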
bdc2cf7264897edba7fe84e4707aa83459aa8cf5
|
run.py
|
run.py
|
__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " @@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
|
__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.config.app_version + " "
"@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
|
Use the version in the config file
|
Use the version in the config file
|
Python
|
mit
|
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
|
__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " @@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
Use the version in the config file
|
__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.config.app_version + " "
"@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
|
<commit_before>__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " @@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)<commit_msg>Use the version in the config file<commit_after>
|
__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.config.app_version + " "
"@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
|
__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " @@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
Use the version in the config file
__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.config.app_version + " "
"@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
|
<commit_before>__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " @@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)<commit_msg>Use the version in the config file<commit_after>__author__ = 'matt'
import datetime
import blockbuster
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.config.app_version + " "
"@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====")
blockbuster.bb_logging.logger.info(
'================Time restriction disabled================') \
if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info(
'================Time restriction enabled================')
blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
if blockbuster.config.debug_mode:
blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
# This section only applies when you are running run.py directly
if __name__ == '__main__':
blockbuster.bb_logging.logger.info("Running http on port 5000")
blockbuster.app.run(host='0.0.0.0', debug=True)
|
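The new line assumes blockbuster.config exposes an app_version string next to debug_mode and timerestriction; the config module itself is not part of this record. A hypothetical sketch of how such a module might surface the value from an INI file (all file and option names invented for illustration):
# config.py -- hypothetical sketch of the assumed blockbuster.config module
try:
    from configparser import ConfigParser  # Python 3
except ImportError:
    from ConfigParser import ConfigParser  # Python 2
_parser = ConfigParser()
_parser.read('config.ini')
app_version = _parser.get('app', 'version')              # e.g. '1.4.2'
debug_mode = _parser.getboolean('app', 'debug')
timerestriction = _parser.getboolean('app', 'timerestriction')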
7e373bd4b3c111b38d983e809aa443ff242860db
|
tests/test_pipelines/test_python.py
|
tests/test_pipelines/test_python.py
|
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.pipeline.python.virtualenv.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
|
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from facio.pipeline.python.virtualenv import Virtualenv
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.state.state.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
@patch('facio.base.input')
def test_get_name(self, mock_input):
mock_input.return_value = 'bar'
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'bar')
@patch('facio.base.input')
def test_get_name_default(self, mock_input):
mock_input.return_value = ''
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'foo')
|
Test for getting virtualenv name, prompting the user
|
Test for getting virtualenv name, prompting the user
|
Python
|
bsd-3-clause
|
krak3n/Facio,krak3n/Facio,krak3n/Facio,krak3n/Facio,krak3n/Facio
|
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.pipeline.python.virtualenv.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
Test for getting virtualenv name, prompting the user
|
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from facio.pipeline.python.virtualenv import Virtualenv
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.state.state.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
@patch('facio.base.input')
def test_get_name(self, mock_input):
mock_input.return_value = 'bar'
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'bar')
@patch('facio.base.input')
def test_get_name_default(self, mock_input):
mock_input.return_value = ''
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'foo')
|
<commit_before># -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.pipeline.python.virtualenv.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
<commit_msg>Test for getting virtualenv name, prompting the user<commit_after>
|
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from facio.pipeline.python.virtualenv import Virtualenv
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.state.state.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
@patch('facio.base.input')
def test_get_name(self, mock_input):
mock_input.return_value = 'bar'
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'bar')
@patch('facio.base.input')
def test_get_name_default(self, mock_input):
mock_input.return_value = ''
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'foo')
|
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.pipeline.python.virtualenv.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
Test for getting virtualenv name, prompting the user
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from facio.pipeline.python.virtualenv import Virtualenv
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.state.state.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
@patch('facio.base.input')
def test_get_name(self, mock_input):
mock_input.return_value = 'bar'
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'bar')
@patch('facio.base.input')
def test_get_name_default(self, mock_input):
mock_input.return_value = ''
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'foo')
|
<commit_before># -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.pipeline.python.virtualenv.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
<commit_msg>Test for getting virtualenv name, prompting the user<commit_after># -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from facio.pipeline.python.virtualenv import Virtualenv
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.state.state.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
@patch('facio.base.input')
def test_get_name(self, mock_input):
mock_input.return_value = 'bar'
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'bar')
@patch('facio.base.input')
def test_get_name_default(self, mock_input):
mock_input.return_value = ''
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'foo')
|
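Taken together the two tests specify get_name()'s contract: prompt through the input name patched at facio.base.input, return the answer, and fall back to state.project_name when the reply is empty. An implementation consistent with those assertions would be roughly the following sketch — inferred from the tests, not taken from the facio source, and the prompt text is invented:
# pipeline/python/virtualenv.py -- inferred sketch only
from facio.state import state
class Virtualenv(object):
    def get_name(self):
        default = state.project_name
        # the real call is routed through facio.base, which is why the
        # tests patch facio.base.input rather than builtins.input
        name = input('Virtualenv name [{0}]: '.format(default))
        return name or default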
bf228943ed149bc5ffea867d40b9a666a9707364
|
powerline/bindings/qtile/widget.py
|
powerline/bindings/qtile/widget.py
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, side='right', **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
self.side = side
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side=self.side)
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
Allow it to configure side
|
Allow it to configure side
|
Python
|
mit
|
junix/powerline,prvnkumar/powerline,xfumihiro/powerline,IvanAli/powerline,bartvm/powerline,dragon788/powerline,EricSB/powerline,darac/powerline,QuLogic/powerline,DoctorJellyface/powerline,bezhermoso/powerline,Luffin/powerline,IvanAli/powerline,S0lll0s/powerline,s0undt3ch/powerline,cyrixhero/powerline,s0undt3ch/powerline,xfumihiro/powerline,QuLogic/powerline,lukw00/powerline,russellb/powerline,Liangjianghao/powerline,bartvm/powerline,EricSB/powerline,xfumihiro/powerline,kenrachynski/powerline,IvanAli/powerline,blindFS/powerline,seanfisk/powerline,lukw00/powerline,darac/powerline,darac/powerline,seanfisk/powerline,kenrachynski/powerline,areteix/powerline,areteix/powerline,S0lll0s/powerline,areteix/powerline,xxxhycl2010/powerline,bezhermoso/powerline,blindFS/powerline,russellb/powerline,prvnkumar/powerline,junix/powerline,xxxhycl2010/powerline,Luffin/powerline,DoctorJellyface/powerline,blindFS/powerline,cyrixhero/powerline,junix/powerline,dragon788/powerline,s0undt3ch/powerline,Luffin/powerline,cyrixhero/powerline,russellb/powerline,Liangjianghao/powerline,EricSB/powerline,bartvm/powerline,DoctorJellyface/powerline,lukw00/powerline,Liangjianghao/powerline,xxxhycl2010/powerline,dragon788/powerline,prvnkumar/powerline,QuLogic/powerline,kenrachynski/powerline,seanfisk/powerline,bezhermoso/powerline,S0lll0s/powerline
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
Allow it to configure side
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, side='right', **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
self.side = side
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side=self.side)
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
<commit_before># vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
<commit_msg>Allow it to configure side<commit_after>
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, side='right', **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
self.side = side
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side=self.side)
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
Allow it to configure side
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, side='right', **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
self.side = side
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side=self.side)
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
<commit_before># vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
<commit_msg>Allow it to configure side<commit_after># vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, side='right', **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
self.side = side
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side=self.side)
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
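Because side defaults to 'right', existing configurations keep their old behaviour, while new ones can render a different powerline side per widget. A usage sketch for a qtile config (bar size and placement assumed):
# ~/.config/qtile/config.py -- usage sketch
from libqtile import bar
from libqtile.config import Screen
from powerline.bindings.qtile.widget import PowerlineTextBox
screens = [
    Screen(bottom=bar.Bar([
        PowerlineTextBox(side='left'),    # left-hand segments
        PowerlineTextBox(side='right'),   # same as the previous hard-coded default
    ], 24)),
]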
c5cadfb774e7d18da656087a113e9d2f9fec4e48
|
lacrm/_version.py
|
lacrm/_version.py
|
__version_info__ = (0, 1, 5)
__version__ = '.'.join(map(str, __version_info__))
|
__version_info__ = (1, 0, 0)
__version__ = '.'.join(map(str, __version_info__))
|
Bump version number to 1.0.0
|
Bump version number to 1.0.0
|
Python
|
mit
|
HighMileage/lacrm
|
__version_info__ = (0, 1, 5)
__version__ = '.'.join(map(str, __version_info__))
Bump version number to 1.0.0
|
__version_info__ = (1, 0, 0)
__version__ = '.'.join(map(str, __version_info__))
|
<commit_before>__version_info__ = (0, 1, 5)
__version__ = '.'.join(map(str, __version_info__))
<commit_msg>Bump version number to 1.0.0<commit_after>
|
__version_info__ = (1, 0, 0)
__version__ = '.'.join(map(str, __version_info__))
|
__version_info__ = (0, 1, 5)
__version__ = '.'.join(map(str, __version_info__))
Bump version number to 1.0.0
__version_info__ = (1, 0, 0)
__version__ = '.'.join(map(str, __version_info__))
|
<commit_before>__version_info__ = (0, 1, 5)
__version__ = '.'.join(map(str, __version_info__))
<commit_msg>Bump version number to 1.0.0<commit_after>__version_info__ = (1, 0, 0)
__version__ = '.'.join(map(str, __version_info__))
|
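The tuple/string pair is a deliberate pattern: the tuple compares element-wise as integers, while the joined string is what gets displayed and packaged. A quick interactive check:
>>> __version_info__ = (1, 0, 0)
>>> '.'.join(map(str, __version_info__))
'1.0.0'
>>> (1, 0, 0) > (0, 1, 5)  # tuple comparison orders versions correctly
True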
2f6e53a12975dc4e15ba8b85e4df409868ec4df9
|
tests/test_utils.py
|
tests/test_utils.py
|
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
columns = ['ID', 'Name']
val = ['Name1', 'another', 'veeeery long']
images = [Struct(**{'id': i ** 16, 'name': val[i]})
for i in range(len(val))]
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
|
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
|
Remove unused test code in test_util.py
|
Remove unused test code in test_util.py
This doesn't seem to do anything
Change-Id: Ieba6b5f7229680146f9b3f2ae2f3f2d2b1354376
|
Python
|
apache-2.0
|
citrix-openstack-build/python-ceilometerclient,citrix-openstack-build/python-ceilometerclient
|
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
columns = ['ID', 'Name']
val = ['Name1', 'another', 'veeeery long']
images = [Struct(**{'id': i ** 16, 'name': val[i]})
for i in range(len(val))]
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
Remove unused test code in test_util.py
This doesn't seem to do anything
Change-Id: Ieba6b5f7229680146f9b3f2ae2f3f2d2b1354376
|
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
|
<commit_before># Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
columns = ['ID', 'Name']
val = ['Name1', 'another', 'veeeery long']
images = [Struct(**{'id': i ** 16, 'name': val[i]})
for i in range(len(val))]
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
<commit_msg>Remove unused test code in test_util.py
This doesn't seem to do anything
Change-Id: Ieba6b5f7229680146f9b3f2ae2f3f2d2b1354376<commit_after>
|
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
|
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
columns = ['ID', 'Name']
val = ['Name1', 'another', 'veeeery long']
images = [Struct(**{'id': i ** 16, 'name': val[i]})
for i in range(len(val))]
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
Remove unused test code in test_util.py
This doesn't seem to do anything
Change-Id: Ieba6b5f7229680146f9b3f2ae2f3f2d2b1354376
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
|
<commit_before># Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
columns = ['ID', 'Name']
val = ['Name1', 'another', 'veeeery long']
images = [Struct(**{'id': i ** 16, 'name': val[i]})
for i in range(len(val))]
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
<commit_msg>Remove unused test code in test_util.py
This doesn't seem to do anything
Change-Id: Ieba6b5f7229680146f9b3f2ae2f3f2d2b1354376<commit_after># Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import sys
import unittest2
from ceilometerclient.common import utils
class UtilsTest(unittest2.TestCase):
def test_prettytable(self):
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is well-formatted (left-aligned)
saved_stdout = sys.stdout
try:
sys.stdout = output_dict = cStringIO.StringIO()
utils.print_dict({'K': 'k', 'Key': 'Value'})
finally:
sys.stdout = saved_stdout
self.assertEqual(output_dict.getvalue(), '''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''')
|
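The deleted lines bound columns, val and images but nothing ever read them, so the test's behaviour is unchanged; dead assignments like these are exactly what static checkers such as pyflakes flag. What the surviving assertion pins down can be reproduced interactively (column padding shown as PrettyTable would actually emit it; the dump above has collapsed the inner whitespace):
>>> from ceilometerclient.common import utils
>>> utils.print_dict({'K': 'k', 'Key': 'Value'})
+----------+-------+
| Property | Value |
+----------+-------+
| K        | k     |
| Key      | Value |
+----------+-------+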
b4b7185a054d07097e743664abda44e121674b8b
|
talks_keeper/forms.py
|
talks_keeper/forms.py
|
from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
labels = Label.objects.all()
for label_ in labels:
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
|
from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
instance = kwargs['instance']
labels = Label.objects.all()
for label_ in labels:
if instance is None:
initial = False
else:
initial = label_.talks.filter(id=instance.id).exists()
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
initial=initial,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self, commit=True):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
|
Update TalkForm to use checked labels
|
Update TalkForm to use checked labels
|
Python
|
mit
|
samitnuk/talks_keeper,samitnuk/talks_keeper,samitnuk/talks_keeper
|
from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
labels = Label.objects.all()
for label_ in labels:
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
Update TalkForm to use checked labels
|
from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
instance = kwargs['instance']
labels = Label.objects.all()
for label_ in labels:
if instance is None:
initial = False
else:
initial = label_.talks.filter(id=instance.id).exists()
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
initial=initial,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self, commit=True):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
|
<commit_before>from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
labels = Label.objects.all()
for label_ in labels:
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
<commit_msg>Update TalkForm to use checked labels<commit_after>
|
from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
instance = kwargs['instance']
labels = Label.objects.all()
for label_ in labels:
if instance is None:
initial = False
else:
initial = label_.talks.filter(id=instance.id).exists()
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
initial=initial,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self, commit=True):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
|
from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
labels = Label.objects.all()
for label_ in labels:
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
Update TalkForm to use checked labels
from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
instance = kwargs['instance']
labels = Label.objects.all()
for label_ in labels:
if instance is None:
initial = False
else:
initial = label_.talks.filter(id=instance.id).exists()
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
initial=initial,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self, commit=True):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
|
<commit_before>from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
labels = Label.objects.all()
for label_ in labels:
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
<commit_msg>Update TalkForm to use checked labels<commit_after>from django import forms
from .models import Label, Talk
class TalkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(TalkForm, self).__init__(*args, **kwargs)
instance = kwargs['instance']
labels = Label.objects.all()
for label_ in labels:
if instance is None:
initial = False
else:
initial = label_.talks.filter(id=instance.id).exists()
self.fields.update({
'label_{}'.format(label_.id): forms.BooleanField(
label=label_.name,
required=False,
initial=initial,
)})
class Meta:
model = Talk
exclude = ['company']
def save(self, commit=True):
talk = super(TalkForm, self).save()
for label_ in Label.objects.all():
if self.cleaned_data['label_{}'.format(label_.id)]:
label_.talks.add(talk)
|
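The change above pre-ticks each label checkbox when the talk being edited already carries that label, and defaults to unticked for a new talk. A minimal plain-Python sketch of that naming-and-initial pattern, with dictionaries as hypothetical stand-ins for the Label/Talk ORM objects:

labels = [("1", "django"), ("2", "testing")]   # hypothetical (pk, name) pairs
attached = {"1"}                               # label pks already linked to this talk

fields = {
    "label_{}".format(pk): {"label": name,
                            "required": False,
                            "initial": pk in attached}   # pre-tick existing links
    for pk, name in labels
}

assert fields["label_1"]["initial"] is True
assert fields["label_2"]["initial"] is False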
6df1e7a7f0987efc8e34c521e8c4de9a75f9dfde
|
troposphere/auth.py
|
troposphere/auth.py
|
import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info(hasattr(user, "auth_tokens"))
non_expired_tokens = user.auth_tokens.filter(only_current_tokens())
return len(non_expired_tokens) > 0
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info(hasattr(user, "auth_tokens"))
return user.auth_tokens.filter(only_current_tokens())
|
import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens()).exists()
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens())
|
Use exists() check from QuerySet; give logger-info context
|
Use exists() check from QuerySet; give logger-info context
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend
|
import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info(hasattr(user, "auth_tokens"))
non_expired_tokens = user.auth_tokens.filter(only_current_tokens())
return len(non_expired_tokens) > 0
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info(hasattr(user, "auth_tokens"))
return user.auth_tokens.filter(only_current_tokens())
Use exists() check from QuerySet; give logger-info context
|
import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens()).exists()
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens())
|
<commit_before>import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info(hasattr(user, "auth_tokens"))
non_expired_tokens = user.auth_tokens.filter(only_current_tokens())
return len(non_expired_tokens) > 0
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info(hasattr(user, "auth_tokens"))
return user.auth_tokens.filter(only_current_tokens())
<commit_msg>Use exists() check from QuerySet; give logger-info context<commit_after>
|
import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens()).exists()
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens())
|
import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info(hasattr(user, "auth_tokens"))
non_expired_tokens = user.auth_tokens.filter(only_current_tokens())
return len(non_expired_tokens) > 0
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info(hasattr(user, "auth_tokens"))
return user.auth_tokens.filter(only_current_tokens())
Use exists() check from QuerySet; give logger-info contextimport logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens()).exists()
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens())
|
<commit_before>import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info(hasattr(user, "auth_tokens"))
non_expired_tokens = user.auth_tokens.filter(only_current_tokens())
return len(non_expired_tokens) > 0
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info(hasattr(user, "auth_tokens"))
return user.auth_tokens.filter(only_current_tokens())
<commit_msg>Use exists() check from QuerySet; give logger-info context<commit_after>import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens()).exists()
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens())
|
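The point of the change above is that QuerySet.exists() compiles to a query the database can answer from the first matching row (roughly SELECT ... LIMIT 1), while len(queryset) evaluates and materialises the whole result set first. The same short-circuit idea in plain Python, offered as an analogy rather than the actual ORM mechanics:

tokens = (t for t in range(10**6))   # lazy source, like an unevaluated QuerySet
assert any(True for _ in tokens)     # stops after consuming a single element
assert next(tokens) == 1             # the generator was barely advanced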
8dcf6c373316d21399fa1edd276cea357fea75fb
|
groundstation/sockets/stream_socket.py
|
groundstation/sockets/stream_socket.py
|
import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
|
import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
if hasattr(data, "SerializeToString"):
data = data.SerializeToString()
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
|
Support being given protobuf Messages
|
Support being given protobuf Messages
|
Python
|
mit
|
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
|
import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
Support being given protobuf Messages
|
import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
if hasattr(data, "SerializeToString"):
data = data.SerializeToString()
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
|
<commit_before>import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
<commit_msg>Support being given protobuf Messages<commit_after>
|
import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
if hasattr(data, "SerializeToString"):
data = data.SerializeToString()
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
|
import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
Support being given protobuf Messagesimport socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
if hasattr(data, "SerializeToString"):
data = data.SerializeToString()
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
|
<commit_before>import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
<commit_msg>Support being given protobuf Messages<commit_after>import socket
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
from groundstation.peer_socket import PeerSocket
class StreamSocket(object):
"""Wraps a TCP socket"""
def __init__(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # XXX Implement the queue as a separate class/
self.write_queue = []
def fileno(self):
"""Return the underlying socket to make select() work"""
return self._sock.fileno()
@property
def socket(self):
return self._sock
def accept(self):
p = self._sock.accept()
log.info("Accepted a connection from %s" % repr(p[1]))
return PeerSocket.from_accept(p)
def enqueue(self, data):
"""Enqueues data for writing inside the select loop"""
if hasattr(data, "SerializeToString"):
data = data.SerializeToString()
self.write_queue.insert(0, data)
def send(self):
data = self.write_queue.pop()
log.info("Attempting to write %i bytes" % (len(data)))
self._sock.send(data)
def has_data_ready(self):
"""(bool) does this socket have enqueued data ready"""
return len(self.write_queue) > 0
|
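enqueue() now duck-types its argument: anything exposing SerializeToString() — the interface protobuf Messages provide — is serialised to bytes before being queued, while plain bytes pass through untouched. A self-contained sketch with a fake message class standing in for a generated protobuf type:

class FakeMessage:
    # Stands in for a generated protobuf Message class.
    def SerializeToString(self):
        return b"payload"

write_queue = []

def enqueue(data):
    # Same check as StreamSocket.enqueue: serialise message-like objects.
    if hasattr(data, "SerializeToString"):
        data = data.SerializeToString()
    write_queue.insert(0, data)

enqueue(b"raw bytes")
enqueue(FakeMessage())
assert write_queue == [b"payload", b"raw bytes"]   # newest first, all bytes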
90a933fcfa52c6ebc41e810b3c851cca696f1e71
|
project/apps/api/management/commands/denormalize.py
|
project/apps/api/management/commands/denormalize.py
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
for t in ts:
t.rank()
return "Done"
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
return "Done"
|
Remove ranking from denormalization command
|
Remove ranking from denormalization command
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
for t in ts:
t.rank()
return "Done"
Remove ranking from denormalization command
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
return "Done"
|
<commit_before>from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
for t in ts:
t.rank()
return "Done"
<commit_msg>Remove ranking from denormalization command<commit_after>
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
return "Done"
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
for t in ts:
t.rank()
return "Done"
Remove ranking from denormalization commandfrom django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
return "Done"
|
<commit_before>from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
for t in ts:
t.rank()
return "Done"
<commit_msg>Remove ranking from denormalization command<commit_after>from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
return "Done"
|
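The command above denormalises by brute force: it re-saves every row so that each model's save() recomputes its cached fields, and the diff simply drops the extra rank() pass over contests. The idea in miniature, with a hypothetical Row class standing in for the Django models:

class Row:
    def __init__(self, first, last):
        self.first, self.last = first, last
        self.full_name = None                 # denormalised field
    def save(self):
        # save() is assumed to refresh the cached value, as the models do
        self.full_name = "{} {}".format(self.first, self.last)

rows = [Row("Ada", "Lovelace"), Row("Alan", "Turing")]
for r in rows:
    r.save()                                  # the whole command is this sweep
assert rows[0].full_name == "Ada Lovelace"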
68bc2d2b50e754d50f1a2f85fa7dbde0ca8a6a12
|
qual/tests/test_iso.py
|
qual/tests/test_iso.py
|
import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
|
import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
@given(integers(MINYEAR, MAXYEAR), integers(None, 0), integers(1, 7))
def test_weeks_smaller_than_1_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
|
Add a new passing test for invalid week numbers.
|
Add a new passing test for invalid week numbers.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
Add a new passing test for invalid week numbers.
|
import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
@given(integers(MINYEAR, MAXYEAR), integers(None, 0), integers(1, 7))
def test_weeks_smaller_than_1_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
|
<commit_before>import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
<commit_msg>Add a new passing test for invalid week numbers.<commit_after>
|
import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
@given(integers(MINYEAR, MAXYEAR), integers(None, 0), integers(1, 7))
def test_weeks_smaller_than_1_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
|
import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
Add a new passing test for invalid week numbers.import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
@given(integers(MINYEAR, MAXYEAR), integers(None, 0), integers(1, 7))
def test_weeks_smaller_than_1_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
|
<commit_before>import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
<commit_msg>Add a new passing test for invalid week numbers.<commit_after>import unittest
from hypothesis import given
from hypothesis.strategies import integers
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date, MINYEAR, MAXYEAR
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
@given(integers(MINYEAR, MAXYEAR), integers(1, 52), integers(1, 7))
def test_round_trip_iso_date(self, year, week, day):
y, w, d = qual.iso_to_gregorian(year, week, day).isocalendar()
self.assertEqual(year, y)
self.assertEqual(week, w)
self.assertEqual(day, d)
@given(integers(MINYEAR, MAXYEAR), integers(54), integers(1, 7))
def test_weeks_greater_than_53_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
@given(integers(MINYEAR, MAXYEAR), integers(None, 0), integers(1, 7))
def test_weeks_smaller_than_1_fail(self, year, week, day):
self.assertRaises(ValueError, lambda : qual.iso_to_gregorian(year, week, day))
|
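In the new test, integers(None, 0) draws weeks with no lower bound and a maximum of 0, pinning down the bottom edge of the valid 1–53 range just as the earlier test pins weeks of 54 and up. Since Python 3.8 the stdlib offers the same round trip, which makes a handy cross-check of what iso_to_gregorian is expected to do (a sketch, not the qual implementation):

from datetime import date

# Round trip: ISO (year, week, day) -> Gregorian date -> ISO again.
d = date.fromisocalendar(2021, 52, 3)
assert tuple(d.isocalendar())[:3] == (2021, 52, 3)

# Weeks outside 1..53 raise ValueError, mirroring both failing-week tests.
for bad_week in (0, 54):
    try:
        date.fromisocalendar(2021, bad_week, 3)
    except ValueError:
        pass
    else:
        raise AssertionError("week %d should be invalid" % bad_week)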
c1a1c976642fa1d8f17f89732f6c4fe5bd76d0de
|
devito/dimension.py
|
devito/dimension.py
|
import cgen
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
|
import cgen
import numpy as np
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
@property
def dtype(self):
"""The data type of the iteration variable"""
return np.int32
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
|
Add dtype of iteration variable
|
Dimension: Add dtype of iteration variable
|
Python
|
mit
|
opesci/devito,opesci/devito
|
import cgen
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
Dimension: Add dtype of iteration variable
|
import cgen
import numpy as np
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
@property
def dtype(self):
"""The data type of the iteration variable"""
return np.int32
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
|
<commit_before>import cgen
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
<commit_msg>Dimension: Add dtype of iteration variable<commit_after>
|
import cgen
import numpy as np
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
@property
def dtype(self):
"""The data type of the iteration variable"""
return np.int32
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
|
import cgen
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
Dimension: Add dtype of iteration variableimport cgen
import numpy as np
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
@property
def dtype(self):
"""The data type of the iteration variable"""
return np.int32
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
|
<commit_before>import cgen
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
<commit_msg>Dimension: Add dtype of iteration variable<commit_after>import cgen
import numpy as np
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
@property
def dtype(self):
"""The data type of the iteration variable"""
return np.int32
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
|
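The dtype property above hands code generation a concrete numpy type for declaring iteration variables, alongside the existing C-level helpers. A minimal sketch of the underlying pattern — hanging extra metadata off a sympy Symbol subclass (Dim here is illustrative, not the devito class):

import numpy as np
from sympy import Symbol

class Dim(Symbol):
    def __new__(cls, name, **kwargs):
        # Symbol is constructed in __new__, so extra attributes attach here.
        obj = Symbol.__new__(cls, name)
        obj.size = kwargs.get('size')
        return obj

    @property
    def dtype(self):
        return np.int32   # iteration counters are plain 32-bit ints

i = Dim('i', size=10)
assert i.dtype is np.int32 and i.size == 10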
2737723b9f0bae0166e63a7a79d4d89bac3a82d9
|
test_passwd_change.py
|
test_passwd_change.py
|
#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
subprocess.call(['rm', '-r', 'test/'])
raise
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
|
#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
raise
else:
subprocess.call(['rm', '-r', 'test/'])
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
|
Fix error - rm test dir command is not executed in correct branch.
|
Fix error - rm test dir command is not executed in correct branch.
|
Python
|
mit
|
maxsocl/oldmailer
|
#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
subprocess.call(['rm', '-r', 'test/'])
raise
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
Fix error - rm test dir command is not executed in correct branch.
|
#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
raise
else:
subprocess.call(['rm', '-r', 'test/'])
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
|
<commit_before>#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
subprocess.call(['rm', '-r', 'test/'])
raise
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
<commit_msg>Fix error - rm test dir command is not executed in correct branch.<commit_after>
|
#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
raise
else:
subprocess.call(['rm', '-r', 'test/'])
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
|
#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
subprocess.call(['rm', '-r', 'test/'])
raise
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
Fix error - rm test dir command is not executed in correct branch.#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
raise
else:
subprocess.call(['rm', '-r', 'test/'])
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
|
<commit_before>#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
subprocess.call(['rm', '-r', 'test/'])
raise
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
<commit_msg>Fix error - rm test dir command is not executed in correct branch.<commit_after>#!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
def setUp(self):
"""
Preconditions
"""
subprocess.call(['mkdir', 'test'])
subprocess.call(['touch', 'test/rvv', 'test/max',
'test/bdv' ,'test/mail'])
#TODO create passwd test file
#TODO create shadow test file
#TODO create keys.txt file
def tearDown(self):
try:
if os.path.exists('test/rvv'):
raise Exception('test/rvv must not exist')
if not (os.path.exists('test/max') and
os.path.exists('test/bdv') and
os.path.exists('test/mail')):
raise Exception('File max, bdv or mail must exist!')
except:
raise
else:
subprocess.call(['rm', '-r', 'test/'])
def test_passwd_change(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test')
def test_passwd_change_2(self):
shadow_change(*passwd_change())
mails_delete(maildir_path='test/')
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
|
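The fix above moves the rm into an else clause, so the test directory is removed only when every postcondition check passed; a failing run now re-raises with the directory left on disk for inspection instead of deleting the evidence first. The pattern in miniature, using tempfile/shutil as safe stand-ins for the subprocess calls:

import os, shutil, tempfile

d = tempfile.mkdtemp()
try:
    if not os.path.isdir(d):      # stand-in for the real postcondition checks
        raise Exception("expected directory is missing")
except:
    raise                         # keep d around for post-mortem debugging
else:
    shutil.rmtree(d)              # remove the fixture only on success

assert not os.path.exists(d)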
432abeacb5496c37bbdaabf7469a6df71e90376e
|
testing/test_BioID.py
|
testing/test_BioID.py
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.json").identify([test_file_path])
assert id_results[test_file_path] == expected_format
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.yml").identify([test_file_path])
assert id_results[test_file_path] == expected_format
|
Set testing script to use YAML file (oops)
|
Set testing script to use YAML file (oops)
|
Python
|
mit
|
LeeBergstrand/BioMagick,LeeBergstrand/BioMagick
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.json").identify([test_file_path])
        assert id_results[test_file_path] == expected_format
Set testing script to use YAML file (oops)
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.yml").identify([test_file_path])
assert id_results[test_file_path] == expected_format
|
<commit_before>#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.json").identify([test_file_path])
assert id_results[test_file_path] == expected_format<commit_msg>Set testing script to use YAML file (oops)<commit_after>
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.yml").identify([test_file_path])
assert id_results[test_file_path] == expected_format
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.json").identify([test_file_path])
        assert id_results[test_file_path] == expected_format
Set testing script to use YAML file (oops)
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.yml").identify([test_file_path])
assert id_results[test_file_path] == expected_format
|
<commit_before>#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.json").identify([test_file_path])
assert id_results[test_file_path] == expected_format<commit_msg>Set testing script to use YAML file (oops)<commit_after>#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains unit test for BioID Class
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
from BioID import BioID
# Nose test generator to iterate format test files defined in CSVs
class TestFormatDefinitions(object):
def test_formats(self):
with open("./testing/format_tests.csv", "rU") as formats_file:
test_files = formats_file.readlines()[1:]
for test_file in test_files:
filename, expected_format = test_file.rstrip(",\n").split(",")
yield self.check_format, filename, expected_format
@staticmethod
def check_format(test_file, expected_format):
# Putting the test file path here saves having to specify a path for each test file in the CSV
test_file_path = "./testing/testFiles/" + test_file
id_results = BioID("./formats.yml").identify([test_file_path])
assert id_results[test_file_path] == expected_format
|
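test_formats above is a nose-style generator test: each yield hands nose one (check, filename, expected) case to run as a separate test. The same table-driven layout under pytest is usually spelled with parametrize; a rough equivalent that keeps the CSV parsing and the BioID call from the diff (a sketch only, not part of the repository):

import pytest
from BioID import BioID

def _cases(path="./testing/format_tests.csv"):
    with open(path) as handle:
        lines = handle.readlines()[1:]              # skip the header row
    return [line.rstrip(",\n").split(",") for line in lines]

@pytest.mark.parametrize("filename,expected_format", _cases())
def test_format(filename, expected_format):
    test_file_path = "./testing/testFiles/" + filename
    id_results = BioID("./formats.yml").identify([test_file_path])
    assert id_results[test_file_path] == expected_format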
7072389221f7e287328cecc695b93a77d04c69ba
|
tests/basecli_test.py
|
tests/basecli_test.py
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def test_init(self):
assert self.app.main(['ass2m_test', 'init']) in (0, None)
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
import sys
import re
from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
Test and capture the CLI output
|
Test and capture the CLI output
|
Python
|
agpl-3.0
|
laurentb/assnet,laurentb/assnet
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def test_init(self):
assert self.app.main(['ass2m_test', 'init']) in (0, None)
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
Test and capture the CLI output
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
import sys
import re
from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
<commit_before>from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def test_init(self):
assert self.app.main(['ass2m_test', 'init']) in (0, None)
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
<commit_msg>Test and capture the CLI output<commit_after>
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
import sys
import re
from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def test_init(self):
assert self.app.main(['ass2m_test', 'init']) in (0, None)
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
Test and capture the CLI output
from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
import sys
import re
from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
<commit_before>from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def test_init(self):
assert self.app.main(['ass2m_test', 'init']) in (0, None)
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
<commit_msg>Test and capture the CLI output<commit_after>from unittest import TestCase
from ass2m.cli import CLI
from tempfile import mkdtemp
import shutil
import sys
import re
from StringIO import StringIO
class BaseCLITest(TestCase):
def setUp(self):
self.root = mkdtemp(prefix='ass2m_test_root')
self.app = CLI(self.root)
def tearDown(self):
if self.root:
shutil.rmtree(self.root)
def beginCapture(self):
self.stdout = sys.stdout
# begin capture
sys.stdout = StringIO()
def endCapture(self):
captured = sys.stdout
# end capture
sys.stdout = self.stdout
self.stdout = None
return captured.getvalue()
def test_init(self):
self.beginCapture()
assert self.app.main(['ass2m_test', 'init']) in (0, None)
output = self.endCapture()
assert output.strip() == "Ass2m working directory created."
self.beginCapture()
assert self.app.main(['ass2m_test', 'tree']) in (0, None)
output = self.endCapture()
assert re.match(re.escape(r'/')+r'\s+'+re.escape(r'all(rl-)'), output, re.S)
assert re.match(".+"+re.escape(r'/.ass2m/')+r'\s+'+re.escape(r'all(---)'), output, re.S)
|
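The beginCapture/endCapture helpers above swap sys.stdout for a StringIO buffer by hand, which is the Python 2 idiom (note the StringIO import). On Python 3 the same capture is available through contextlib.redirect_stdout; a sketch of the helper, with the test's assertions repeated as comments:

import io
from contextlib import redirect_stdout

def run_captured(app, argv):
    # Collect everything the command prints while it runs.
    buf = io.StringIO()
    with redirect_stdout(buf):
        status = app.main(argv)
    return status, buf.getvalue()

# status, output = run_captured(self.app, ['ass2m_test', 'init'])
# assert status in (0, None)
# assert output.strip() == "Ass2m working directory created."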
926d5333c1556850a3eda6025ac8cf471b67c0a3
|
condor/probes/setup.py
|
condor/probes/setup.py
|
#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/etc/sysconfig', ['config/collect_history'])],
license = 'Apache 2.0'
)
|
#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/var/lib/collect_history', []),
('/etc/sysconfig', ['config/collect_history'])],
license='Apache 2.0'
)
|
Add directory for state files
|
Add directory for state files
|
Python
|
apache-2.0
|
DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs
|
#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/etc/sysconfig', ['config/collect_history'])],
license = 'Apache 2.0'
)
Add directory for state files
|
#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/var/lib/collect_history', []),
('/etc/sysconfig', ['config/collect_history'])],
license='Apache 2.0'
)
|
<commit_before>#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/etc/sysconfig', ['config/collect_history'])],
license = 'Apache 2.0'
)
<commit_msg>Add directory for state files<commit_after>
|
#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/var/lib/collect_history', []),
('/etc/sysconfig', ['config/collect_history'])],
license='Apache 2.0'
)
|
#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/etc/sysconfig', ['config/collect_history'])],
license = 'Apache 2.0'
)
Add directory for state files
#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/var/lib/collect_history', []),
('/etc/sysconfig', ['config/collect_history'])],
license='Apache 2.0'
)
|
<commit_before>#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/etc/sysconfig', ['config/collect_history'])],
license = 'Apache 2.0'
)
<commit_msg>Add directory for state files<commit_after>#!/usr/bin/env python
# Copyright 2015 University of Chicago
# Available under Apache 2.0 License
from distutils.core import setup
setup(name='htcondor-es-probes',
version='0.6.3',
description='HTCondor probes for Elasticsearch analytics',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor',
packages=['probe_libs'],
scripts=['collect_history_info.py', 'get_job_status.py'],
data_files=[('/etc/init.d/', ['scripts/collect_history']),
('/etc/cron.d/', ['config/schedd_probe']),
('/var/lib/collect_history', []),
('/etc/sysconfig', ['config/collect_history'])],
license='Apache 2.0'
)
|
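The functional change above is the ('/var/lib/collect_history', []) entry: distutils' install_data runs mkpath on every target directory before copying files, so pairing a directory with an empty file list creates it at install time without shipping anything into it. That gives the probe a writable home for its state files. The list, annotated:

data_files = [
    ('/etc/init.d/', ['scripts/collect_history']),  # file copied into dir
    ('/etc/cron.d/', ['config/schedd_probe']),
    ('/var/lib/collect_history', []),               # directory only, no files
    ('/etc/sysconfig', ['config/collect_history']),
]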
2d102e049ceb4ac6d9892313e78b82fc91f9e84c
|
tests/test_filters.py
|
tests/test_filters.py
|
from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
|
from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
def test_moving_average_order_5(self):
fake_rri = np.array([810, 830, 860, 790, 804, 801, 800])
rri_filt = moving_average(fake_rri, order=5)
expected = [810, 830, 818.79, 817.0, 811.0, 801, 800]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
|
Test moving average filter for 5th order
|
Test moving average filter for 5th order
|
Python
|
bsd-3-clause
|
rhenanbartels/hrv
|
from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
Test moving average filter for 5th order
|
from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
def test_moving_average_order_5(self):
fake_rri = np.array([810, 830, 860, 790, 804, 801, 800])
rri_filt = moving_average(fake_rri, order=5)
expected = [810, 830, 818.79, 817.0, 811.0, 801, 800]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
|
<commit_before>from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
<commit_msg>Test moving average filter for 5th order<commit_after>
|
from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
def test_moving_average_order_5(self):
fake_rri = np.array([810, 830, 860, 790, 804, 801, 800])
rri_filt = moving_average(fake_rri, order=5)
expected = [810, 830, 818.79, 817.0, 811.0, 801, 800]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
|
from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
Test moving average filter for 5th order
from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
def test_moving_average_order_5(self):
fake_rri = np.array([810, 830, 860, 790, 804, 801, 800])
rri_filt = moving_average(fake_rri, order=5)
expected = [810, 830, 818.79, 817.0, 811.0, 801, 800]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
|
<commit_before>from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
<commit_msg>Test moving average filter for 5th order<commit_after>from unittest import TestCase
import numpy as np
from hrv.filters import moving_average
class Filter(TestCase):
def test_moving_average_order_3(self):
fake_rri = np.array([810, 830, 860, 790, 804])
rri_filt = moving_average(fake_rri, order=3)
expected = [810, 833.33, 826.66, 818, 804]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
def test_moving_average_order_5(self):
fake_rri = np.array([810, 830, 860, 790, 804, 801, 800])
rri_filt = moving_average(fake_rri, order=5)
expected = [810, 830, 818.79, 817.0, 811.0, 801, 800]
np.testing.assert_almost_equal(rri_filt, expected, decimal=2)
|
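Both expected vectors are consistent with a centered moving average that passes the first and last order//2 samples through unchanged: for order 5, index 2 is (810+830+860+790+804)/5 = 818.8, which matches 818.79 within the decimal=2 tolerance used by assert_almost_equal. A sketch of a filter with that behavior (not necessarily the actual hrv.filters implementation):

import numpy as np

def moving_average(rri, order=3):
    rri = np.asarray(rri, dtype=float)
    filt = rri.copy()                 # edge samples stay as-is
    offset = order // 2
    for i in range(offset, len(rri) - offset):
        filt[i] = rri[i - offset:i + offset + 1].mean()
    return filt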
e1c57cb41c59c118648602ff9837418e5d4baad4
|
saleor/dashboard/category/forms.py
|
saleor/dashboard/category/forms.py
|
from django import forms
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
Add validation on category parent field
|
Add validation on category parent field
|
Python
|
bsd-3-clause
|
itbabu/saleor,rchav/vinerack,avorio/saleor,HyperManTT/ECommerceSaleor,laosunhust/saleor,itbabu/saleor,josesanch/saleor,Drekscott/Motlaesaleor,maferelo/saleor,taedori81/saleor,rchav/vinerack,avorio/saleor,rodrigozn/CW-Shop,avorio/saleor,maferelo/saleor,arth-co/saleor,paweltin/saleor,jreigel/saleor,paweltin/saleor,Drekscott/Motlaesaleor,taedori81/saleor,UITools/saleor,taedori81/saleor,dashmug/saleor,itbabu/saleor,tfroehlich82/saleor,paweltin/saleor,UITools/saleor,jreigel/saleor,avorio/saleor,arth-co/saleor,laosunhust/saleor,dashmug/saleor,rodrigozn/CW-Shop,rchav/vinerack,KenMutemi/saleor,spartonia/saleor,spartonia/saleor,KenMutemi/saleor,laosunhust/saleor,jreigel/saleor,arth-co/saleor,tfroehlich82/saleor,arth-co/saleor,josesanch/saleor,car3oon/saleor,Drekscott/Motlaesaleor,josesanch/saleor,UITools/saleor,rodrigozn/CW-Shop,UITools/saleor,spartonia/saleor,HyperManTT/ECommerceSaleor,maferelo/saleor,paweltin/saleor,Drekscott/Motlaesaleor,HyperManTT/ECommerceSaleor,UITools/saleor,KenMutemi/saleor,spartonia/saleor,mociepka/saleor,taedori81/saleor,car3oon/saleor,mociepka/saleor,mociepka/saleor,laosunhust/saleor,car3oon/saleor,dashmug/saleor,tfroehlich82/saleor
|
from django import forms
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
        exclude = []
Add validation on category parent field
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
<commit_before>from django import forms
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []<commit_msg>Add validation on category parent field<commit_after>
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
from django import forms
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
        exclude = []
Add validation on category parent field
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
<commit_before>from django import forms
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []<commit_msg>Add validation on category parent field<commit_after>from django import forms
from django.utils.translation import ugettext_lazy as _
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
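Django forms run any clean_<fieldname> method during is_valid(), after the field's own conversion, and a ValidationError raised there is attached to that field in form.errors. A usage sketch against the form above (Category's other fields are not visible in this diff, so the data dict is illustrative):

# category: an already-saved Category instance
form = CategoryForm(data={'name': category.name, 'parent': category.pk},
                    instance=category)       # try to parent it to itself
assert not form.is_valid()
assert 'parent' in form.errors               # the error lands on the field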
7b9206d7c3fcf91c6ac16b54b9e1d13b92f7802a
|
tests/test_testing.py
|
tests/test_testing.py
|
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import numpy as np
import pytest
from metpy.testing import assert_array_almost_equal
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
|
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import warnings
import numpy as np
import pytest
from metpy.deprecation import MetpyDeprecationWarning
from metpy.testing import assert_array_almost_equal, check_and_silence_deprecation
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
@check_and_silence_deprecation
def test_deprecation_decorator():
"""Make sure the deprecation checker works."""
warnings.warn('Testing warning.', MetpyDeprecationWarning)
|
Add explicit test for deprecation decorator
|
MNT: Add explicit test for deprecation decorator
|
Python
|
bsd-3-clause
|
dopplershift/MetPy,ShawnMurd/MetPy,ahaberlie/MetPy,Unidata/MetPy,Unidata/MetPy,ahaberlie/MetPy,dopplershift/MetPy
|
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import numpy as np
import pytest
from metpy.testing import assert_array_almost_equal
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
MNT: Add explicit test for deprecation decorator
|
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import warnings
import numpy as np
import pytest
from metpy.deprecation import MetpyDeprecationWarning
from metpy.testing import assert_array_almost_equal, check_and_silence_deprecation
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
@check_and_silence_deprecation
def test_deprecation_decorator():
"""Make sure the deprecation checker works."""
warnings.warn('Testing warning.', MetpyDeprecationWarning)
|
<commit_before># Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import numpy as np
import pytest
from metpy.testing import assert_array_almost_equal
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
<commit_msg>MNT: Add explicit test for deprecation decorator<commit_after>
|
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import warnings
import numpy as np
import pytest
from metpy.deprecation import MetpyDeprecationWarning
from metpy.testing import assert_array_almost_equal, check_and_silence_deprecation
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
@check_and_silence_deprecation
def test_deprecation_decorator():
"""Make sure the deprecation checker works."""
warnings.warn('Testing warning.', MetpyDeprecationWarning)
|
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import numpy as np
import pytest
from metpy.testing import assert_array_almost_equal
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
MNT: Add explicit test for deprecation decorator
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import warnings
import numpy as np
import pytest
from metpy.deprecation import MetpyDeprecationWarning
from metpy.testing import assert_array_almost_equal, check_and_silence_deprecation
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
@check_and_silence_deprecation
def test_deprecation_decorator():
"""Make sure the deprecation checker works."""
warnings.warn('Testing warning.', MetpyDeprecationWarning)
|
<commit_before># Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import numpy as np
import pytest
from metpy.testing import assert_array_almost_equal
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
<commit_msg>MNT: Add explicit test for deprecation decorator<commit_after># Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Test MetPy's testing utilities."""
import warnings
import numpy as np
import pytest
from metpy.deprecation import MetpyDeprecationWarning
from metpy.testing import assert_array_almost_equal, check_and_silence_deprecation
# Test #1183: numpy.testing.assert_array* ignores any masked value, so work-around
def test_masked_arrays():
"""Test that we catch masked arrays with different masks."""
with pytest.raises(AssertionError):
assert_array_almost_equal(np.array([10, 20]),
np.ma.array([10, np.nan], mask=[False, True]), 2)
def test_masked_and_no_mask():
"""Test that we can compare a masked array with no masked values and a regular array."""
a = np.array([10, 20])
b = np.ma.array([10, 20], mask=[False, False])
assert_array_almost_equal(a, b)
@check_and_silence_deprecation
def test_deprecation_decorator():
"""Make sure the deprecation checker works."""
warnings.warn('Testing warning.', MetpyDeprecationWarning)
|
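check_and_silence_deprecation comes from metpy.testing, so this diff only exercises it; the decorator's body is not shown. One plausible shape, written purely as an illustration (not MetPy's actual code, and assuming MetpyDeprecationWarning derives from DeprecationWarning), records warnings raised by the wrapped test, asserts a deprecation was among them, and keeps it out of the output:

import functools
import warnings

def check_and_silence_deprecation(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter('always')      # record, don't print
            result = func(*args, **kwargs)
        assert any(issubclass(w.category, DeprecationWarning)
                   for w in caught), 'expected a deprecation warning'
        return result
    return wrapper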
fd39c97cd1cab3e55ba6aa067127af93e41af506
|
tests/travis-setup.py
|
tests/travis-setup.py
|
import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.commit()
connection.close()
|
import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.execute("CREATE INDEX ON sessions(session_id)")
connection.session.commit()
connection.close()
|
Create index on session_id in order to speed tests
|
Create index on session_id in order to speed tests
It seems that session querying has been the longest component of all my
tests, and adding one test raised my test time significantly. Hopefully
this smooths some of that out.
|
Python
|
mit
|
ollien/Timpani,ollien/Timpani,ollien/Timpani
|
import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.commit()
connection.close()
Create index on session_id in order to speed tests
It seems that session querying has been the longest component of all my
tests, and adding one test raised my test time significantly. Hopefully
this smooths some of that out.
|
import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.execute("CREATE INDEX ON sessions(session_id)")
connection.session.commit()
connection.close()
|
<commit_before>import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.commit()
connection.close()
<commit_msg>Create index on session_id in order to speed tests
It seems that session querying has been the longest component of all my
tests, and adding one test raised my test time significantly. Hopefully
this smooths some of that out.<commit_after>
|
import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.execute("CREATE INDEX ON sessions(session_id)")
connection.session.commit()
connection.close()
|
import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.commit()
connection.close()
Create index on session_id in order to speed tests
It seems that session querying has been the longest component of all my
tests, and adding one test raised my test time significantly. Hopefully
this smooths some of that out.
import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.execute("CREATE INDEX ON sessions(session_id)")
connection.session.commit()
connection.close()
|
<commit_before>import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.commit()
connection.close()
<commit_msg>Create index on session_id in order to speed tests
It seems that session querying has been the longest component of all my
tests, and adding one test raised my test time significantly. Hopefully
this smooths some of that out.<commit_after>import bcrypt
import sys
import os
sys.path.insert(0, "..")
from timpani import database
connection = database.DatabaseConnection()
hashedpassword = bcrypt.hashpw(bytes("password", "utf-8"), bcrypt.gensalt()).decode("utf-8")
user = database.tables.User(username = "tests", password = hashedpassword, full_name = "Timpani Tests", can_change_settings = True, can_write_posts = True)
connection.session.add(user)
connection.session.execute("CREATE INDEX ON sessions(session_id)")
connection.session.commit()
connection.close()
|
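A raw CREATE INDEX ON sessions(session_id) leaves naming to the backend, which PostgreSQL permits but SQLite, for one, rejects. A more portable spelling uses SQLAlchemy's Index construct; the table definition below is a stand-in, since timpani's real sessions table is not shown in this diff:

from sqlalchemy import Column, Index, Integer, MetaData, String, Table

metadata = MetaData()
sessions = Table('sessions', metadata,               # stand-in definition
                 Column('id', Integer, primary_key=True),
                 Column('session_id', String))

ix = Index('ix_sessions_session_id', sessions.c.session_id)
# ix.create(bind=engine)   # engine from the project's DatabaseConnection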
f745a1cd6b5f8b0991c7af4f631b8b5c09bf79f5
|
tailorscad/tests/test_arg_parser.py
|
tailorscad/tests/test_arg_parser.py
|
import unittest
from tailorscad.arg_parser import parse_args
class TestArgParser(unittest.TestCase):
def test_parse_args_none(self):
args = []
argv = []
args = parse_args(argv)
self.assertFalse(args.config)
    def test_parse_args_unknown(self):
args = []
argv = ['-a', 'word']
args = parse_args(argv)
self.assertFalse(args.config)
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
|
import unittest
from tailorscad.arg_parser import parse_args
# TODO: Making the config required forced some changes I don't like onto these tests
class TestArgParser(unittest.TestCase):
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
|
Remove some tests because of new required arg
|
Remove some tests because of new required arg
|
Python
|
mit
|
savorywatt/tailorSCAD
|
import unittest
from tailorscad.arg_parser import parse_args
class TestArgParser(unittest.TestCase):
def test_parse_args_none(self):
args = []
argv = []
args = parse_args(argv)
self.assertFalse(args.config)
    def test_parse_args_unknown(self):
args = []
argv = ['-a', 'word']
args = parse_args(argv)
self.assertFalse(args.config)
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
Remove some tests because of new required arg
|
import unittest
from tailorscad.arg_parser import parse_args
# TODO: Making the config required forced some changes I don't like onto these tests
class TestArgParser(unittest.TestCase):
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
|
<commit_before>
import unittest
from tailorscad.arg_parser import parse_args
class TestArgParser(unittest.TestCase):
def test_parse_args_none(self):
args = []
argv = []
args = parse_args(argv)
self.assertFalse(args.config)
    def test_parse_args_unknown(self):
args = []
argv = ['-a', 'word']
args = parse_args(argv)
self.assertFalse(args.config)
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
<commit_msg>Remove some tests because of new required arg<commit_after>
|
import unittest
from tailorscad.arg_parser import parse_args
# TODO: Making the config required forced some changes I don't like onto these tests
class TestArgParser(unittest.TestCase):
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
|
import unittest
from tailorscad.arg_parser import parse_args
class TestArgParser(unittest.TestCase):
def test_parse_args_none(self):
args = []
argv = []
args = parse_args(argv)
self.assertFalse(args.config)
    def test_parse_args_unknown(self):
args = []
argv = ['-a', 'word']
args = parse_args(argv)
self.assertFalse(args.config)
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
Remove some tests because of new required arg
import unittest
from tailorscad.arg_parser import parse_args
# TODO: Making the config required forced some changes I don't like onto these tests
class TestArgParser(unittest.TestCase):
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
    def test_parse_args_unknown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
|
<commit_before>
import unittest
from tailorscad.arg_parser import parse_args
class TestArgParser(unittest.TestCase):
def test_parse_args_none(self):
args = []
argv = []
args = parse_args(argv)
self.assertFalse(args.config)
    def test_parse_args_unknown(self):
args = []
argv = ['-a', 'word']
args = parse_args(argv)
self.assertFalse(args.config)
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
def test_parse_args_unkown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
<commit_msg>Remove some tests because of new required arg<commit_after>
import unittest
from tailorscad.arg_parser import parse_args
# TODO: Making the config required forced some changes I don't like to this file
class TestArgParser(unittest.TestCase):
def test_parse_args_known(self):
args = []
argv = ['-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
def test_parse_args_unkown_and_known(self):
args = []
argv = ['-a', 'word', '-c', 'test']
args = parse_args(argv)
self.assertTrue(args)
self.assertEqual(args.config, 'test')
|
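The record above ships only the test file; tailorscad.arg_parser itself is not shown. Below is a minimal sketch of a parse_args consistent with these tests. The prog name and flag spelling are assumptions, and parse_known_args is merely one way to tolerate the unknown '-a word' flag, not necessarily the project's way.

import argparse

def parse_args(argv):
    parser = argparse.ArgumentParser(prog='tailorscad')
    # Making -c/--config required is what broke the old "no args" tests:
    # argparse now exits with an error instead of returning config=None.
    parser.add_argument('-c', '--config', required=True)
    # parse_known_args collects unrecognised flags such as '-a word'
    # instead of erroring out on them.
    args, _unknown = parser.parse_known_args(argv)
    return args

print(parse_args(['-a', 'word', '-c', 'test']).config)  # prints: test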
25133d90fe267dba522c9b87eb0bd614ae8556dd
|
web_433Mhz/views.py
|
web_433Mhz/views.py
|
from web_433Mhz import app
from flask import render_template
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
|
from web_433Mhz import app
from flask import render_template, jsonify
import subprocess
import os
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
@app.route('/api/get_code', methods=['GET'])
def get_code():
proc = subprocess.Popen(os.path.abspath('../433Mhz'),\
stdout=subprocess.PIPE)
code = proc.communicate()[0].decode('utf-8') # Grab the stdout
return jsonify({'code': code})
|
Add api call to open binary and grab stdout
|
Add api call to open binary and grab stdout
|
Python
|
agpl-3.0
|
tuxxy/433Mhz_web,tuxxy/433Mhz_web,tuxxy/433Mhz_web,tuxxy/433Mhz_web
|
from web_433Mhz import app
from flask import render_template
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
Add api call to open binary and grab stdout
|
from web_433Mhz import app
from flask import render_template, jsonify
import subprocess
import os
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
@app.route('/api/get_code', methods=['GET'])
def get_code():
proc = subprocess.Popen(os.path.abspath('../433Mhz'),\
stdout=subprocess.PIPE)
code = proc.communicate()[0].decode('utf-8') # Grab the stdout
return jsonify({'code': code})
|
<commit_before>from web_433Mhz import app
from flask import render_template
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
<commit_msg>Add api call to open binary and grab stdout<commit_after>
|
from web_433Mhz import app
from flask import render_template, jsonify
import subprocess
import os
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
@app.route('/api/get_code', methods=['GET'])
def get_code():
proc = subprocess.Popen(os.path.abspath('../433Mhz'),\
stdout=subprocess.PIPE)
code = proc.communicate()[0].decode('utf-8') # Grab the stdout
return jsonify({'code': code})
|
from web_433Mhz import app
from flask import render_template
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
Add api call to open binary and grab stdout
from web_433Mhz import app
from flask import render_template, jsonify
import subprocess
import os
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
@app.route('/api/get_code', methods=['GET'])
def get_code():
proc = subprocess.Popen(os.path.abspath('../433Mhz'),\
stdout=subprocess.PIPE)
code = proc.communicate()[0].decode('utf-8') # Grab the stdout
return jsonify({'code': code})
|
<commit_before>from web_433Mhz import app
from flask import render_template
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
<commit_msg>Add api call to open binary and grab stdout<commit_after>from web_433Mhz import app
from flask import render_template, jsonify
import subprocess
import os
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
@app.route('/api/get_code', methods=['GET'])
def get_code():
proc = subprocess.Popen(os.path.abspath('../433Mhz'),\
stdout=subprocess.PIPE)
code = proc.communicate()[0].decode('utf-8') # Grab the stdout
return jsonify({'code': code})
|
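One caveat about the endpoint above: Popen plus communicate() blocks the Flask worker until the binary exits. A variant using subprocess.run with a timeout is sketched below; the 5-second limit is an invented number, and note that os.path.abspath resolves against the process working directory, not the module file.

import os
import subprocess

def read_code(binary='../433Mhz', timeout=5):
    # subprocess.run raises subprocess.TimeoutExpired if the reader hangs,
    # so a stuck binary cannot pin the web worker forever.
    proc = subprocess.run([os.path.abspath(binary)],
                          stdout=subprocess.PIPE, timeout=timeout)
    return proc.stdout.decode('utf-8')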
7d28f4e101200515152f3281aafdda1315d290fc
|
scheduler/schedule.py
|
scheduler/schedule.py
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=3600) # 1hr timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=604800) # 7 day timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
Increase timeout of update_graph job to 7 days
|
Increase timeout of update_graph job to 7 days
|
Python
|
apache-2.0
|
ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=3600) # 1hr timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
Increase timeout of update_graph job to 7 days
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=604800) # 7 day timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
<commit_before>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=3600) # 1hr timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
<commit_msg>Increase timeout of update_graph job to 7 days<commit_after>
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=604800) # 7 day timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=3600) # 1hr timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
Increase timeout of update_graph job to 7 days
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=604800) # 7 day timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
<commit_before>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=3600) # 1hr timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
<commit_msg>Increase timeout of update_graph job to 7 days<commit_after>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=604800) # 7 day timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
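The only functional change in this record is the timeout literal. rq's enqueue(timeout=...) is measured in seconds, so the new magic number is exactly seven days; a named constant makes that self-checking.

SECONDS_PER_DAY = 24 * 60 * 60
SEVEN_DAYS = 7 * SECONDS_PER_DAY
assert SEVEN_DAYS == 604800  # the literal used in the commit
# e.g. q.enqueue(jobs.update_graph, timeout=SEVEN_DAYS)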
5c30173731d058b51d7a94238a3ccf5984e2e790
|
echo_server.py
|
echo_server.py
|
#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
|
Change format to satisfy pedantic linter
|
Change format to satisfy pedantic linter
|
Python
|
mit
|
charlieRode/network_tools
|
#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
Change format to satisfy pedantic linter
|
#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
<commit_msg>Change format to satisfy pedantic linter<commit_after>
|
#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
Change format to satisfy pedantic linter
#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
<commit_msg>Change format to satisfy pedantic linter<commit_after>#!/usr/bin/env python
import socket
def main():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
conn, addr = server_socket.accept()
msg = conn.recv(1024)
conn.sendall(msg)
conn.shutdown(socket.SHUT_WR)
conn.close()
if __name__ == '__main__':
main()
|
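For completeness, a throwaway client that exercises the echo server above. It assumes the server is already listening on 127.0.0.1:50000 and is not part of the original record.

import socket

def echo_once(message=b'hello'):
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.connect(('127.0.0.1', 50000))
    client.sendall(message)
    reply = client.recv(1024)  # the server sends the payload straight back
    client.close()
    return reply

print(echo_once())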
76ecb6a4b71d1a248b21cf1671360514dc6c3be2
|
mobile/backends/twilio.py
|
mobile/backends/twilio.py
|
# encoding: utf-8
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
raise NotImplementedError
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
|
# encoding: utf-8
import twilio.twiml
from django.http import QueryDict
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
import mobile.models
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
data = QueryDict(data).copy()
sms = mobile.models.IncomingSMS(
message_id=data.get('MessageSid'),
country=data.get('FromCountry', None),
sender=data.get('From'),
recipient=data.get('To'),
message=data.get('Body'),
source=data
)
return sms.save()
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
|
Add receive support to Twilio backend
|
Add receive support to Twilio backend
|
Python
|
mit
|
hyperoslo/django-mobile
|
# encoding: utf-8
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
raise NotImplementedError
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
Add receive support to Twilio backend
|
# encoding: utf-8
import twilio.twiml
from django.http import QueryDict
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
import mobile.models
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
data = QueryDict(data).copy()
sms = mobile.models.IncomingSMS(
message_id=data.get('MessageSid'),
country=data.get('FromCountry', None),
sender=data.get('From'),
recipient=data.get('To'),
message=data.get('Body'),
source=data
)
return sms.save()
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
|
<commit_before># encoding: utf-8
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
raise NotImplementedError
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
<commit_msg>Add receive support to Twilio backend<commit_after>
|
# encoding: utf-8
import twilio.twiml
from django.http import QueryDict
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
import mobile.models
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
data = QueryDict(data).copy()
sms = mobile.models.IncomingSMS(
message_id=data.get('MessageSid'),
country=data.get('FromCountry', None),
sender=data.get('From'),
recipient=data.get('To'),
message=data.get('Body'),
source=data
)
return sms.save()
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
|
# encoding: utf-8
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
raise NotImplementedError
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
Add receive support to Twilio backend
# encoding: utf-8
import twilio.twiml
from django.http import QueryDict
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
import mobile.models
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
data = QueryDict(data).copy()
sms = mobile.models.IncomingSMS(
message_id=data.get('MessageSid'),
country=data.get('FromCountry', None),
sender=data.get('From'),
recipient=data.get('To'),
message=data.get('Body'),
source=data
)
return sms.save()
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
|
<commit_before># encoding: utf-8
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
raise NotImplementedError
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
<commit_msg>Add receive support to Twilio backend<commit_after># encoding: utf-8
import twilio.twiml
from django.http import QueryDict
from twilio.rest import TwilioRestClient
from mobile.backends.base import BaseBackend
import mobile.models
class Backend(BaseBackend):
"""Twilio Gate Backend."""
class SMS:
@classmethod
def send(self, recipient, sender, message):
"""
Send an SMS and return its initial delivery status code.
See twilio-python Documentation: https://github.com/twilio/twilio-python
"""
client = TwilioRestClient()
message = client.messages.create(
to=recipient,
from_=sender,
body=message
)
return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage]
@classmethod
def receive(self, data):
"""Return IncomingSMS instance from parsed data."""
data = QueryDict(data).copy()
sms = mobile.models.IncomingSMS(
message_id=data.get('MessageSid'),
country=data.get('FromCountry', None),
sender=data.get('From'),
recipient=data.get('To'),
message=data.get('Body'),
source=data
)
return sms.save()
class MMS:
@classmethod
def receive(self, data):
"""Return IncomingMMS instance from parsed data."""
raise NotImplementedError
|
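The new receive() expects Twilio's urlencoded webhook body. The snippet below illustrates what django's QueryDict does with such a payload; the field values are invented, and it must run inside a configured Django project.

from django.http import QueryDict

raw = 'MessageSid=SM123&From=%2B4700000000&To=%2B4711111111&Body=hi&FromCountry=NO'
data = QueryDict(raw).copy()  # .copy() yields a mutable QueryDict, as in receive()
assert data.get('MessageSid') == 'SM123'
assert data.get('Body') == 'hi'
assert data.get('FromCountry', None) == 'NO'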
a6e868803e1336d83ee8863d15896880603fc777
|
tornwamp/customize.py
|
tornwamp/customize.py
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
Add PublishProcessor to processors' list
|
Add PublishProcessor to processors' list
|
Python
|
apache-2.0
|
ef-ctx/tornwamp
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
Add PublishProcessor to processors' list
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
<commit_before>"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
<commit_msg>Add PublishProcessor to processors' list<commit_after>
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
Add PublishProcessor to processors' list
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
<commit_before>"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
<commit_msg>Add PublishProcessor to processors' list<commit_after>"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
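The processors dict is a plain code-to-handler dispatch table, and the commented legend lists the codes still unmapped. The sketch below shows how such a table is typically consumed; plain ints stand in for tornwamp's Code enum, and the explicit fallback is an assumption, not tornwamp's documented behaviour.

PROCESSORS = {16: 'publish', 32: 'subscribe'}  # stand-ins for processor classes

def dispatch(code):
    handler = PROCESSORS.get(code)
    if handler is None:
        # Codes from the legend that have no processor yet land here
        # instead of surfacing as a bare KeyError.
        raise ValueError('unsupported WAMP message code: %d' % code)
    return handler

assert dispatch(16) == 'publish'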
b2e0a123631d326f06192a01758ebe581284dbdf
|
src/pip/_internal/operations/generate_metadata.py
|
src/pip/_internal/operations/generate_metadata.py
|
"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if install_req.use_pep517:
return install_req.prepare_pep517_metadata
else:
return install_req.run_egg_info
|
"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if not install_req.use_pep517:
return install_req.run_egg_info
return install_req.prepare_pep517_metadata
|
Return early for legacy processes
|
Return early for legacy processes
|
Python
|
mit
|
xavfernandez/pip,pfmoore/pip,rouge8/pip,rouge8/pip,pradyunsg/pip,rouge8/pip,pfmoore/pip,sbidoul/pip,xavfernandez/pip,pypa/pip,pradyunsg/pip,xavfernandez/pip,pypa/pip,sbidoul/pip
|
"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if install_req.use_pep517:
return install_req.prepare_pep517_metadata
else:
return install_req.run_egg_info
Return early for legacy processes
|
"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if not install_req.use_pep517:
return install_req.run_egg_info
return install_req.prepare_pep517_metadata
|
<commit_before>"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if install_req.use_pep517:
return install_req.prepare_pep517_metadata
else:
return install_req.run_egg_info
<commit_msg>Return early for legacy processes<commit_after>
|
"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if not install_req.use_pep517:
return install_req.run_egg_info
return install_req.prepare_pep517_metadata
|
"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if install_req.use_pep517:
return install_req.prepare_pep517_metadata
else:
return install_req.run_egg_info
Return early for legacy processes
"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if not install_req.use_pep517:
return install_req.run_egg_info
return install_req.prepare_pep517_metadata
|
<commit_before>"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if install_req.use_pep517:
return install_req.prepare_pep517_metadata
else:
return install_req.run_egg_info
<commit_msg>Return early for legacy processes<commit_after>"""Metadata generation logic for source distributions.
"""
def get_metadata_generator(install_req):
if not install_req.use_pep517:
return install_req.run_egg_info
return install_req.prepare_pep517_metadata
|
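The rewrite is a pure guard-clause refactor: same two outcomes, with the legacy branch first. A stand-in requirement object, invented for the example, confirms both shapes agree.

class FakeReq:  # hypothetical stand-in for an InstallRequirement
    use_pep517 = False
    run_egg_info = 'legacy'
    prepare_pep517_metadata = 'pep517'

def get_metadata_generator(install_req):
    if not install_req.use_pep517:
        return install_req.run_egg_info
    return install_req.prepare_pep517_metadata

assert get_metadata_generator(FakeReq()) == 'legacy'
FakeReq.use_pep517 = True
assert get_metadata_generator(FakeReq()) == 'pep517'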
6456cfa00361a16fe53dfd62052d03567bcd66c0
|
clifford/_version.py
|
clifford/_version.py
|
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.2.0'
|
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
|
Create a pre-release version for PyPI, to test the new readme format.
|
Create a pre-release version for PyPI, to test the new readme format.
|
Python
|
bsd-3-clause
|
arsenovic/clifford,arsenovic/clifford
|
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.2.0'
Create a pre-release version for PyPI, to test the new readme format.
|
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
|
<commit_before># Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.2.0'
<commit_msg>Create a pre-release version for PyPI, to test the new readme format.<commit_after>
|
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
|
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.2.0'
Create a pre-release version for PyPI, to test the new readme format.
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
|
<commit_before># Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.2.0'
<commit_msg>Create a pre-release version for PyPI, to test the new readme format.<commit_after># Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
|
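'1.3.0dev0' is shorthand that PEP 440 normalises to the dev release '1.3.0.dev0', which sorts before the final 1.3.0, exactly what a pre-release upload to PyPI needs. A quick check with the packaging library (an extra dependency, used here only for illustration):

from packaging.version import Version

assert Version('1.3.0dev0') == Version('1.3.0.dev0')  # PEP 440 normalisation
assert Version('1.2.0') < Version('1.3.0dev0') < Version('1.3.0')
assert Version('1.3.0dev0').is_prerelease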
99b668594582882bb1fbca3b3793ff452edac2c1
|
updatebot/__init__.py
|
updatebot/__init__.py
|
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.native import Bot as NativeBot
from updatebot.config import UpdateBotConfig
|
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.config import UpdateBotConfig
|
Remove import of missing module
|
Remove import of missing module
|
Python
|
apache-2.0
|
sassoftware/mirrorball,sassoftware/mirrorball
|
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.native import Bot as NativeBot
from updatebot.config import UpdateBotConfig
Remove import of missing module
|
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.config import UpdateBotConfig
|
<commit_before>#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.native import Bot as NativeBot
from updatebot.config import UpdateBotConfig
<commit_msg>Remove import of missing module<commit_after>
|
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.config import UpdateBotConfig
|
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.native import Bot as NativeBot
from updatebot.config import UpdateBotConfig
Remove import of missing module
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.config import UpdateBotConfig
|
<commit_before>#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.native import Bot as NativeBot
from updatebot.config import UpdateBotConfig
<commit_msg>Remove import of missing module<commit_after>#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.config import UpdateBotConfig
|
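The fix simply deletes the dead import. Had updatebot.native been optional rather than removed, an import guard would have been the softer alternative; the snippet below is a hypothetical, not what the project did.

try:
    from updatebot.native import Bot as NativeBot
except ImportError:
    NativeBot = None  # native backend not present in this tree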
f4777e994a29a8dbc704950411156cca4ff59ac3
|
oscar/core/compat.py
|
oscar/core/compat.py
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
|
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
Use better exception for AUTH_USER_MODEL
|
Use better exception for AUTH_USER_MODEL
If AUTH_USER_MODEL is improperly configured as 'project.customer.User',
the error is:
ValueError: too many values to unpack
Use the standard Django error instead:
ImproperlyConfigured: AUTH_USER_MODEL must be of the form
'app_label.model_name'
|
Python
|
bsd-3-clause
|
faratro/django-oscar,rocopartners/django-oscar,thechampanurag/django-oscar,spartonia/django-oscar,vovanbo/django-oscar,saadatqadri/django-oscar,sasha0/django-oscar,pdonadeo/django-oscar,monikasulik/django-oscar,bschuon/django-oscar,mexeniz/django-oscar,jinnykoo/christmas,QLGu/django-oscar,manevant/django-oscar,jinnykoo/christmas,anentropic/django-oscar,anentropic/django-oscar,michaelkuty/django-oscar,john-parton/django-oscar,django-oscar/django-oscar,faratro/django-oscar,john-parton/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,saadatqadri/django-oscar,pdonadeo/django-oscar,elliotthill/django-oscar,ahmetdaglarbas/e-commerce,kapari/django-oscar,elliotthill/django-oscar,lijoantony/django-oscar,pasqualguerrero/django-oscar,kapt/django-oscar,bnprk/django-oscar,solarissmoke/django-oscar,binarydud/django-oscar,itbabu/django-oscar,marcoantoniooliveira/labweb,saadatqadri/django-oscar,jinnykoo/christmas,eddiep1101/django-oscar,monikasulik/django-oscar,faratro/django-oscar,binarydud/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj,anentropic/django-oscar,manevant/django-oscar,MatthewWilkes/django-oscar,makielab/django-oscar,rocopartners/django-oscar,amirrpp/django-oscar,WadeYuChen/django-oscar,thechampanurag/django-oscar,thechampanurag/django-oscar,machtfit/django-oscar,john-parton/django-oscar,jlmadurga/django-oscar,pasqualguerrero/django-oscar,ahmetdaglarbas/e-commerce,manevant/django-oscar,taedori81/django-oscar,ahmetdaglarbas/e-commerce,ahmetdaglarbas/e-commerce,Bogh/django-oscar,Bogh/django-oscar,vovanbo/django-oscar,marcoantoniooliveira/labweb,solarissmoke/django-oscar,django-oscar/django-oscar,jinnykoo/wuyisj.com,sasha0/django-oscar,rocopartners/django-oscar,WillisXChen/django-oscar,itbabu/django-oscar,machtfit/django-oscar,bnprk/django-oscar,faratro/django-oscar,rocopartners/django-oscar,okfish/django-oscar,WillisXChen/django-oscar,josesanch/django-oscar,bnprk/django-oscar,nfletton/django-oscar,WadeYuChen/django-oscar,adamend/django-oscar,QLGu/django-oscar,MatthewWilkes/django-oscar,saadatqadri/django-oscar,jinnykoo/wuyisj,dongguangming/django-oscar,nickpack/django-oscar,taedori81/django-oscar,jinnykoo/wuyisj.com,QLGu/django-oscar,john-parton/django-oscar,amirrpp/django-oscar,sasha0/django-oscar,ademuk/django-oscar,Jannes123/django-oscar,nickpack/django-oscar,lijoantony/django-oscar,bschuon/django-oscar,nickpack/django-oscar,kapt/django-oscar,okfish/django-oscar,mexeniz/django-oscar,Jannes123/django-oscar,makielab/django-oscar,dongguangming/django-oscar,Idematica/django-oscar,django-oscar/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,Jannes123/django-oscar,pasqualguerrero/django-oscar,pdonadeo/django-oscar,sonofatailor/django-oscar,jlmadurga/django-oscar,vovanbo/django-oscar,ka7eh/django-oscar,spartonia/django-oscar,monikasulik/django-oscar,amirrpp/django-oscar,nickpack/django-oscar,josesanch/django-oscar,bschuon/django-oscar,vovanbo/django-oscar,MatthewWilkes/django-oscar,taedori81/django-oscar,marcoantoniooliveira/labweb,Idematica/django-oscar,manevant/django-oscar,ademuk/django-oscar,elliotthill/django-oscar,dongguangming/django-oscar,taedori81/django-oscar,adamend/django-oscar,QLGu/django-oscar,itbabu/django-oscar,spartonia/django-oscar,bnprk/django-oscar,pasqualguerrero/django-oscar,jmt4/django-oscar,binarydud/django-oscar,anentropic/django-oscar,pdonadeo/django-oscar,bschuon/django-oscar,okfish/django-oscar,sonofatailor/django-oscar,nfletton/django-oscar,thechampanurag/django-oscar,sonofatailor/django-oscar,ademuk/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,kapari/django-oscar,ka7eh/django-oscar,michaelkuty/django-oscar,MatthewWilkes/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,Bogh/django-oscar,mexeniz/django-oscar,amirrpp/django-oscar,jmt4/django-oscar,Bogh/django-oscar,mexeniz/django-oscar,jmt4/django-oscar,jinnykoo/wuyisj.com,jinnykoo/wuyisj,WadeYuChen/django-oscar,eddiep1101/django-oscar,kapari/django-oscar,spartonia/django-oscar,solarissmoke/django-oscar,michaelkuty/django-oscar,nfletton/django-oscar,sonofatailor/django-oscar,makielab/django-oscar,dongguangming/django-oscar,DrOctogon/unwash_ecom,sasha0/django-oscar,adamend/django-oscar,WadeYuChen/django-oscar,jlmadurga/django-oscar,Idematica/django-oscar,makielab/django-oscar,solarissmoke/django-oscar,okfish/django-oscar,kapt/django-oscar,eddiep1101/django-oscar,WillisXChen/django-oscar,josesanch/django-oscar,Jannes123/django-oscar,itbabu/django-oscar,michaelkuty/django-oscar,ademuk/django-oscar,machtfit/django-oscar,binarydud/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj.com,jmt4/django-oscar,adamend/django-oscar,DrOctogon/unwash_ecom,django-oscar/django-oscar,WillisXChen/django-oscar,DrOctogon/unwash_ecom
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
Use better exception for AUTH_USER_MODEL
If AUTH_USER_MODEL is improperly configured as 'project.customer.User',
the error is:
ValueError: too many values to unpack
Use rather standard Django's error:
ImproperlyConfigured: AUTH_USER_MODEL must be of the form
'app_label.model_name'
|
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
<commit_before>from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
<commit_msg>Use better exception for AUTH_USER_MODEL
If AUTH_USER_MODEL is improperly configured as 'project.customer.User',
the error is:
ValueError: too many values to unpack
Use rather standard Django's error:
ImproperlyConfigured: AUTH_USER_MODEL must be of the form
'app_label.model_name'<commit_after>
|
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
Use better exception for AUTH_USER_MODEL
If AUTH_USER_MODEL is improperly configured as 'project.customer.User',
the error is:
ValueError: too many values to unpack
Use rather standard Django's error:
ImproperlyConfigured: AUTH_USER_MODEL must be of the form
'app_label.model_name'from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
<commit_before>from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
<commit_msg>Use better exception for AUTH_USER_MODEL
If AUTH_USER_MODEL is improperly configured as 'project.customer.User',
the error is:
ValueError: too many values to unpack
Use rather standard Django's error:
ImproperlyConfigured: AUTH_USER_MODEL must be of the form
'app_label.model_name'<commit_after>from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
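A minimal standalone sketch for the record above (not part of the original commit; assumes Django is importable) showing why a three-part AUTH_USER_MODEL value breaks the bare tuple unpacking and how the try/except turns that into Django's clearer configuration error:

from django.core.exceptions import ImproperlyConfigured

def split_user_model(value):
    # 'auth.User' unpacks into exactly two names; 'project.customer.User'
    # yields three items, so the unpacking raises ValueError, which is
    # re-raised as Django's standard configuration error.
    try:
        app_label, model_name = value.split('.')
    except ValueError:
        raise ImproperlyConfigured(
            "AUTH_USER_MODEL must be of the form 'app_label.model_name'")
    return app_label, model_name

print(split_user_model('auth.User'))        # ('auth', 'User')
# split_user_model('project.customer.User') # raises ImproperlyConfigured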
e280ae4d1780448c8940b060d151c0668c205f91
|
parquet/bitstring.py
|
parquet/bitstring.py
|
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
|
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = (divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
|
Add closing paren to tuple expression
|
Add closing paren to tuple expression
Under Python 2.7.6, this file didn't compile for me as-is. I still need to clone and rerun the test suite, but I thought I'd try Github's nifty "fork and edit online" feature. Will comment again when the tests pass.
|
Python
|
apache-2.0
|
cloudera/hue,cloudera/hue,lumig242/Hue-Integration-with-CDAP,kawamon/hue,xq262144/hue,lumig242/Hue-Integration-with-CDAP,fangxingli/hue,Peddle/hue,h4ck3rm1k3/parquet-python,todaychi/hue,kawamon/hue,cloudera/hue,cloudera/hue,todaychi/hue,xq262144/hue,Peddle/hue,jjmleiro/hue,jjmleiro/hue,jjmleiro/hue,jcrobak/parquet-python,kawamon/hue,todaychi/hue,todaychi/hue,jayceyxc/hue,cloudera/hue,lumig242/Hue-Integration-with-CDAP,kawamon/hue,cloudera/hue,todaychi/hue,cloudera/hue,xq262144/hue,cloudera/hue,MobinRanjbar/hue,kawamon/hue,kawamon/hue,jayceyxc/hue,cloudera/hue,xq262144/hue,jjmleiro/hue,jayceyxc/hue,kawamon/hue,Peddle/hue,lumig242/Hue-Integration-with-CDAP,fangxingli/hue,cloudera/hue,cloudera/hue,lumig242/Hue-Integration-with-CDAP,MobinRanjbar/hue,jayceyxc/hue,MobinRanjbar/hue,xq262144/hue,Peddle/hue,todaychi/hue,fangxingli/hue,fangxingli/hue,cloudera/hue,cloudera/hue,todaychi/hue,jjmleiro/hue,fangxingli/hue,todaychi/hue,Peddle/hue,jjmleiro/hue,kawamon/hue,todaychi/hue,fangxingli/hue,kawamon/hue,kawamon/hue,kawamon/hue,cloudera/hue,lumig242/Hue-Integration-with-CDAP,kawamon/hue,Peddle/hue,MobinRanjbar/hue,lumig242/Hue-Integration-with-CDAP,MobinRanjbar/hue,xq262144/hue,xq262144/hue,MobinRanjbar/hue,jayceyxc/hue,kawamon/hue,jayceyxc/hue,xq262144/hue,xq262144/hue,lumig242/Hue-Integration-with-CDAP,cloudera/hue,kawamon/hue,jjmleiro/hue,kawamon/hue,jayceyxc/hue,cloudera/hue,kawamon/hue,jjmleiro/hue,cloudera/hue,kawamon/hue,Peddle/hue,fangxingli/hue,kawamon/hue,MobinRanjbar/hue,fangxingli/hue,jayceyxc/hue,MobinRanjbar/hue,jjmleiro/hue,Peddle/hue,lumig242/Hue-Integration-with-CDAP,Peddle/hue,cloudera/hue,jayceyxc/hue
|
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
Add closing paren to tuple expression
Under Python 2.7.6, this file didn't compile for me as-is. I still need to clone and rerun the test suite, but I thought I'd try Github's nifty "fork and edit online" feature. Will comment again when the tests pass.
|
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = (divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
|
<commit_before>
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
<commit_msg>Add closing paren to tuple expression
Under Python 2.7.6, this file didn't compile for me as-is. I still need to clone and rerun the test suite, but I thought I'd try Github's nifty "fork and edit online" feature. Will comment again when the tests pass.<commit_after>
|
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = (divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
|
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
Add closing paren to tuple expression
Under Python 2.7.6, this file didn't compile for me as-is. I still need to clone and rerun the test suite, but I thought I'd try Github's nifty "fork and edit online" feature. Will comment again when the tests pass.
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = (divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
|
<commit_before>
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
<commit_msg>Add closing paren to tuple expression
Under Python 2.7.6, this file didn't compile for me as-is. I still need to clone and rerun the test suite, but I thought I'd try Github's nifty "fork and edit online" feature. Will comment again when the tests pass.<commit_after>
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = (divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
|
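Note that even the patched line in the record above still miscalls divmod: the comma belongs inside the call, so (divmod(self.offset + key), 8) raises TypeError at runtime, and the surrounding code also references the undefined names data and length. A hedged sketch of what the class presumably intends (slice support omitted; this is not the committed code):

SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]

class BitString(object):
    def __init__(self, bytes, length=None, offset=None):
        self.bytes = bytes
        self.offset = offset if offset is not None else 0
        # presumably len(bytes); the original references an undefined 'data'
        self.length = length if length is not None else 8 * len(bytes) - self.offset

    def __getitem__(self, key):
        if key < 0 or key >= self.length:  # presumably self.length, not bare 'length'
            raise IndexError()
        # divmod(n, 8) splits a bit position into (byte index, bit within byte)
        byte_index, bit_offset = divmod(self.offset + key, 8)
        return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]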
690c70db9717bcc538db4e35597145870106844f
|
versioning/signals.py
|
versioning/signals.py
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.split("\n"),
new_data.split("\n"), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.splitlines(),
new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
Use splitlines instead of hard-coding the line endings.
|
Use splitlines instead of hard-coding the line endings.
git-svn-id: 15b99a5ef70b6649222be30eb13433ba2eb40757@14 cdb1d5cb-5653-0410-9e46-1b5f511687a6
|
Python
|
bsd-3-clause
|
luzfcb/django-versioning,luzfcb/django-versioning
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.split("\n"),
new_data.split("\n"), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
Use splitlines instead of hard-coding the line endings.
git-svn-id: 15b99a5ef70b6649222be30eb13433ba2eb40757@14 cdb1d5cb-5653-0410-9e46-1b5f511687a6
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.splitlines(),
new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
<commit_before>
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.split("\n"),
new_data.split("\n"), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
<commit_msg>Use splitlines instead of hard-coding the line endings.
git-svn-id: 15b99a5ef70b6649222be30eb13433ba2eb40757@14 cdb1d5cb-5653-0410-9e46-1b5f511687a6<commit_after>
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.splitlines(),
new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.split("\n"),
new_data.split("\n"), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
Use splitlines instead of hard-coding the line endings.
git-svn-id: 15b99a5ef70b6649222be30eb13433ba2eb40757@14 cdb1d5cb-5653-0410-9e46-1b5f511687a6
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.splitlines(),
new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
<commit_before>
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.split("\n"),
new_data.split("\n"), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
<commit_msg>Use splitlines instead of hard-coding the line endings.
git-svn-id: 15b99a5ef70b6649222be30eb13433ba2eb40757@14 cdb1d5cb-5653-0410-9e46-1b5f511687a6<commit_after>
import sha
from django.contrib.contenttypes.models import ContentType
from versioning import _registry
from versioning.diff import unified_diff
from versioning.models import Revision
def pre_save(instance, **kwargs):
"""
"""
model = kwargs["sender"]
fields = _registry[model]
original = model._default_manager.get(pk=instance.pk)
ct = ContentType.objects.get_for_model(model)
diff = []
for field in fields:
original_data = getattr(original, field)
new_data = getattr(instance, field)
data_diff = unified_diff(original_data.splitlines(),
new_data.splitlines(), context=3)
diff.extend(["--- %s.%s" % (model.__name__, field),
"+++ %s.%s" % (model.__name__, field)])
for line in data_diff:
diff.append(line)
delta = "\n".join(diff)
sha1 = sha.new(delta)
rev = Revision(sha1=sha1.hexdigest(), object_pk=instance.pk,
content_type=ct, delta=delta)
rev.save()
|
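A quick standalone illustration of the behaviour the commit above relies on: splitlines() copes with \r\n and other line endings that a hard-coded split("\n") leaves behind.

text = "first line\r\nsecond line\r\n"
print(text.split("\n"))   # ['first line\r', 'second line\r', ''] (stray \r, empty tail)
print(text.splitlines())  # ['first line', 'second line']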
c79d040cb952e8e37c231caf90eda92d152978b8
|
openfisca_country_template/__init__.py
|
openfisca_country_template/__init__.py
|
# -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_files = [
'__root__.xml',
'benefits.xml',
'general.xml',
'taxes.xml',
]
for param_file in param_files:
param_path = os.path.join(COUNTRY_DIR, 'parameters', param_file)
self.add_legislation_params(param_path)
|
# -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_path = os.path.join(COUNTRY_DIR, 'parameters')
self.add_legislation_params(param_path)
|
Use YAML params instead of XML params
|
Use YAML params instead of XML params
|
Python
|
agpl-3.0
|
openfisca/country-template,openfisca/country-template
|
# -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_files = [
'__root__.xml',
'benefits.xml',
'general.xml',
'taxes.xml',
]
for param_file in param_files:
param_path = os.path.join(COUNTRY_DIR, 'parameters', param_file)
self.add_legislation_params(param_path)
Use YAML params instead of XML params
|
# -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_path = os.path.join(COUNTRY_DIR, 'parameters')
self.add_legislation_params(param_path)
|
<commit_before># -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_files = [
'__root__.xml',
'benefits.xml',
'general.xml',
'taxes.xml',
]
for param_file in param_files:
param_path = os.path.join(COUNTRY_DIR, 'parameters', param_file)
self.add_legislation_params(param_path)
<commit_msg>Use YAML params instead of XML params<commit_after>
|
# -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_path = os.path.join(COUNTRY_DIR, 'parameters')
self.add_legislation_params(param_path)
|
# -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_files = [
'__root__.xml',
'benefits.xml',
'general.xml',
'taxes.xml',
]
for param_file in param_files:
param_path = os.path.join(COUNTRY_DIR, 'parameters', param_file)
self.add_legislation_params(param_path)
Use YAML params instead of XML params# -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_path = os.path.join(COUNTRY_DIR, 'parameters')
self.add_legislation_params(param_path)
|
<commit_before># -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_files = [
'__root__.xml',
'benefits.xml',
'general.xml',
'taxes.xml',
]
for param_file in param_files:
param_path = os.path.join(COUNTRY_DIR, 'parameters', param_file)
self.add_legislation_params(param_path)
<commit_msg>Use YAML params instead of XML params<commit_after># -*- coding: utf-8 -*-
import os
from openfisca_core.taxbenefitsystems import TaxBenefitSystem
from . import entities
COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))
# Our country tax and benefit class inherits from the general TaxBenefitSystem class.
# The name CountryTaxBenefitSystem must not be changed, as all tools of the OpenFisca ecosystem expect a CountryTaxBenefitSystem class to be exposed in the __init__ module of a country package.
class CountryTaxBenefitSystem(TaxBenefitSystem):
def __init__(self):
# We initialize our tax and benefit system with the general constructor
super(CountryTaxBenefitSystem, self).__init__(entities.entities)
# We add to our tax and benefit system all the variables
self.add_variables_from_directory(os.path.join(COUNTRY_DIR, 'variables'))
# We add to our tax and benefit system all the legislation parameters defined in the parameters files
param_path = os.path.join(COUNTRY_DIR, 'parameters')
self.add_legislation_params(param_path)
|
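A hedged sketch of the difference the commit above makes (paths are illustrative, and the add_legislation_params call is quoted from the record, not re-verified against any particular OpenFisca version): instead of enumerating each XML parameter file by hand, the constructor hands the whole parameters directory to the loader, so new YAML files are picked up without editing the module.

import os

COUNTRY_DIR = os.path.dirname(os.path.abspath(__file__))

# Before: one call per hand-listed XML file.
# for name in ('__root__.xml', 'benefits.xml', 'general.xml', 'taxes.xml'):
#     self.add_legislation_params(os.path.join(COUNTRY_DIR, 'parameters', name))

# After: a single call on the parameters directory itself.
# self.add_legislation_params(os.path.join(COUNTRY_DIR, 'parameters'))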
07bd41f4588570a3b026efad0a70d979f4bf8e5b
|
esis/__init__.py
|
esis/__init__.py
|
# -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
|
# -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
from esis.es import Client
|
Make client available at the package level
|
Make client available at the package level
This will make imports easier for anyone willing to use esis as a
library
|
Python
|
mit
|
jcollado/esis
|
# -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
Make client available at the package level
This will make imports easier for anyone willing to use esis as a
library
|
# -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
from esis.es import Client
|
<commit_before># -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
<commit_msg>Make client available at the package level
This will make imports easier for anyone willing to use esis as a
library<commit_after>
|
# -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
from esis.es import Client
|
# -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
Make client available at the package level
This will make imports easier for anyone willing to use esis as a
library# -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
from esis.es import Client
|
<commit_before># -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
<commit_msg>Make client available at the package level
This will make imports easier for anyone willing to use esis as a
library<commit_after># -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
__author__ = 'Javier Collado'
__email__ = 'jcollado@nowsecure.com'
__version__ = '0.2.0'
from esis.es import Client
|
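A two-line usage sketch of what the added re-export buys callers (assumes the esis package is installed):

# Before the change, callers had to reach into the internal module:
#     from esis.es import Client
# After it, the public name is importable from the package root:
from esis import Client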
8f5f31ef9543ad345c894103dbda94358a5e4eee
|
apps/storybase_user/models.py
|
apps/storybase_user/models.py
|
from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = models.CharField(max_length=200)
slug = models.SlugField()
members = models.ManyToManyField(User, related_name='projects', blank=True)
def __unicode__(self):
return self.name
|
from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
organizations = models.ManyToManyField(Organization, related_name='projects', blank=True)
members = models.ManyToManyField(User, related_name='projects', blank=True)
# TODO: Add Stories field to Project
def __unicode__(self):
return self.name
|
Revert "Revert "Updated fields for Project model.""
|
Revert "Revert "Updated fields for Project model.""
This reverts commit 726662d102453f7c7be5fb31499a8c4d5ab34444.
|
Python
|
mit
|
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
|
from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = models.CharField(max_length=200)
slug = models.SlugField()
members = models.ManyToManyField(User, related_name='projects', blank=True)
def __unicode__(self):
return self.name
Revert "Revert "Updated fields for Project model.""
This reverts commit 726662d102453f7c7be5fb31499a8c4d5ab34444.
|
from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
organizations = models.ManyToManyField(Organization, related_name='projects', blank=True)
members = models.ManyToManyField(User, related_name='projects', blank=True)
# TODO: Add Stories field to Project
def __unicode__(self):
return self.name
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = models.CharField(max_length=200)
slug = models.SlugField()
members = models.ManyToManyField(User, related_name='projects', blank=True)
def __unicode__(self):
return self.name
<commit_msg>Revert "Revert "Updated fields for Project model.""
This reverts commit 726662d102453f7c7be5fb31499a8c4d5ab34444.<commit_after>
|
from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
organizations = models.ManyToManyField(Organization, related_name='projects', blank=True)
members = models.ManyToManyField(User, related_name='projects', blank=True)
# TODO: Add Stories field to Project
def __unicode__(self):
return self.name
|
from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = models.CharField(max_length=200)
slug = models.SlugField()
members = models.ManyToManyField(User, related_name='projects', blank=True)
def __unicode__(self):
return self.name
Revert "Revert "Updated fields for Project model.""
This reverts commit 726662d102453f7c7be5fb31499a8c4d5ab34444.from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
organizations = models.ManyToManyField(Organization, related_name='projects', blank=True)
members = models.ManyToManyField(User, related_name='projects', blank=True)
# TODO: Add Stories field to Project
def __unicode__(self):
return self.name
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = models.CharField(max_length=200)
slug = models.SlugField()
members = models.ManyToManyField(User, related_name='projects', blank=True)
def __unicode__(self):
return self.name
<commit_msg>Revert "Revert "Updated fields for Project model.""
This reverts commit 726662d102453f7c7be5fb31499a8c4d5ab34444.<commit_after>from django.contrib.auth.models import User
from django.db import models
from uuidfield.fields import UUIDField
from storybase.fields import ShortTextField
class Organization(models.Model):
""" An organization or a community group that users and stories can be associated with. """
organization_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
members = models.ManyToManyField(User, related_name='organizations', blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('organization_detail', [self.organization_id])
class Project(models.Model):
"""
A project that collects related stories.
Users can also be related to projects.
"""
project_id = UUIDField(auto=True)
name = ShortTextField()
slug = models.SlugField()
website_url = models.URLField(blank=True)
description = models.TextField(blank=True)
created = models.DateTimeField(auto_now_add=True)
last_edited = models.DateTimeField(auto_now=True)
organizations = models.ManyToManyField(Organization, related_name='projects', blank=True)
members = models.ManyToManyField(User, related_name='projects', blank=True)
# TODO: Add Stories field to Project
def __unicode__(self):
return self.name
|
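A hedged usage sketch of the restored Project fields (names are taken from the record above; a configured Django project is assumed, so the ORM calls are shown as comments only):

# With the organizations ManyToManyField back, projects and organizations
# can be linked and queried in both directions:
# org = Organization.objects.create(name='Example Org', slug='example-org')
# proj = Project.objects.create(name='Example Project', slug='example-project')
# proj.organizations.add(org)
# org.projects.all()  # reverse lookup via related_name='projects'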
cc3f28e74145729c8b572fd9d2ed04d8fb297360
|
Testing/TestDICOMPython.py
|
Testing/TestDICOMPython.py
|
#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
|
#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
if vtk.vtkVersion.GetVTKMajorVersion() < 6:
sys.stderr.write("This test requires VTK 6 or higher.\n");
sys.exit(0)
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
|
Modify python test for VTK 5.
|
Modify python test for VTK 5.
|
Python
|
bsd-3-clause
|
dgobbi/vtk-dicom,dgobbi/vtk-dicom,hendradarwin/vtk-dicom,dgobbi/vtk-dicom,hendradarwin/vtk-dicom,hendradarwin/vtk-dicom
|
#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
Modify python test for VTK 5.
|
#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
if vtk.vtkVersion.GetVTKMajorVersion() < 6:
sys.stderr.write("This test requires VTK 6 or higher.\n");
sys.exit(0)
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
|
<commit_before>#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
<commit_msg>Modify python test for VTK 5.<commit_after>
|
#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
if vtk.vtkVersion.GetVTKMajorVersion() < 6:
sys.stderr.write("This test requires VTK 6 or higher.\n");
sys.exit(0)
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
|
#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
Modify python test for VTK 5.#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
if vtk.vtkVersion.GetVTKMajorVersion() < 6:
sys.stderr.write("This test requires VTK 6 or higher.\n");
sys.exit(0)
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
|
<commit_before>#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
<commit_msg>Modify python test for VTK 5.<commit_after>#! /usr/bin/env python2
import sys
import vtk
import vtkDICOMPython
# put everything into the vtk namespace
for a in dir(vtkDICOMPython):
if a[0] != '_':
setattr(vtk, a, getattr(vtkDICOMPython, a))
m = vtk.vtkDICOMMetaData()
if vtk.vtkVersion.GetVTKMajorVersion() < 6:
sys.stderr.write("This test requires VTK 6 or higher.\n");
sys.exit(0)
m.SetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005), 'ISO_IR 100')
v = m.GetAttributeValue(vtk.vtkDICOMTag(0x0008, 0x0005))
if v.AsString() != 'ISO_IR 100':
sys.exit(1)
|
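The guard added above is a common pattern for skipping a test on older VTK; a self-contained sketch of the same idea (skip semantics assumed: exit code 0 so the test harness records a pass rather than a failure):
#! /usr/bin/env python2
import sys
import vtk
REQUIRED_MAJOR = 6
if vtk.vtkVersion.GetVTKMajorVersion() < REQUIRED_MAJOR:
    sys.stderr.write("This test requires VTK %d or higher.\n" % REQUIRED_MAJOR)
    sys.exit(0)  # bail out before touching any VTK 6-only API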
5d9fa1838ffe7ffedb59453a0eca520b5f8d5849
|
pyscf/ci/__init__.py
|
pyscf/ci/__init__.py
|
from pyscf.ci.cisd import CISD
|
from pyscf.ci import cisd
def CISD(mf, frozen=[], mo_coeff=None, mo_occ=None):
from pyscf import scf
if isinstance(mf, (scf.uhf.UHF, scf.rohf.ROHF)):
raise NotImplementedError('RO-CISD, UCISD are not available in this pyscf version')
return cisd.CISD(mf, frozen, mo_coeff, mo_occ)
|
Revert accidental changes to ci
|
Revert accidental changes to ci
|
Python
|
apache-2.0
|
gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf
|
from pyscf.ci.cisd import CISD
Revert accidental changes to ci
|
from pyscf.ci import cisd
def CISD(mf, frozen=[], mo_coeff=None, mo_occ=None):
from pyscf import scf
if isinstance(mf, (scf.uhf.UHF, scf.rohf.ROHF)):
raise NotImplementedError('RO-CISD, UCISD are not available in this pyscf version')
return cisd.CISD(mf, frozen, mo_coeff, mo_occ)
|
<commit_before>from pyscf.ci.cisd import CISD
<commit_msg>Revert accidental changes to ci<commit_after>
|
from pyscf.ci import cisd
def CISD(mf, frozen=[], mo_coeff=None, mo_occ=None):
from pyscf import scf
if isinstance(mf, (scf.uhf.UHF, scf.rohf.ROHF)):
raise NotImplementedError('RO-CISD, UCISD are not available in this pyscf version')
return cisd.CISD(mf, frozen, mo_coeff, mo_occ)
|
from pyscf.ci.cisd import CISD
Revert accidental changes to cifrom pyscf.ci import cisd
def CISD(mf, frozen=[], mo_coeff=None, mo_occ=None):
from pyscf import scf
if isinstance(mf, (scf.uhf.UHF, scf.rohf.ROHF)):
raise NotImplementedError('RO-CISD, UCISD are not available in this pyscf version')
return cisd.CISD(mf, frozen, mo_coeff, mo_occ)
|
<commit_before>from pyscf.ci.cisd import CISD
<commit_msg>Revert accidental changes to ci<commit_after>from pyscf.ci import cisd
def CISD(mf, frozen=[], mo_coeff=None, mo_occ=None):
from pyscf import scf
if isinstance(mf, (scf.uhf.UHF, scf.rohf.ROHF)):
raise NotImplementedError('RO-CISD, UCISD are not available in this pyscf version')
return cisd.CISD(mf, frozen, mo_coeff, mo_occ)
|
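A usage sketch for the restored wrapper, assuming a restricted Hartree-Fock reference (the molecule and basis are arbitrary choices for illustration):
from pyscf import gto, scf, ci
mol = gto.M(atom='H 0 0 0; H 0 0 0.74', basis='sto-3g')
mf = scf.RHF(mol).run()   # restricted reference: the supported path
myci = ci.CISD(mf)        # dispatches through the wrapper defined above
myci.kernel()
# Passing a UHF or ROHF object as `mf` would raise NotImplementedError instead.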
f957a71b65336c403e876fc04eb45779b873c511
|
hapi/events.py
|
hapi/events.py
|
from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
|
from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def get_event(self, event_id, **options):
return self._call('events/%s' % event_id, **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
|
Add method to fetch a single event
|
Add method to fetch a single event
|
Python
|
apache-2.0
|
jonathan-s/happy,CurataEng/hapipy,HubSpot/hapipy,CBitLabs/hapipy
|
from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
Add method to fetch a single event
|
from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def get_event(self, event_id, **options):
return self._call('events/%s' % event_id, **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
|
<commit_before>from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
<commit_msg>Add method to fetch a single event<commit_after>
|
from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def get_event(self, event_id, **options):
return self._call('events/%s' % event_id, **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
|
from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
Add method to fetch a single eventfrom base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def get_event(self, event_id, **options):
return self._call('events/%s' % event_id, **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
|
<commit_before>from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
<commit_msg>Add method to fetch a single event<commit_after>from base import BaseClient
EVENTS_API_VERSION = 'v1'
class EventsClient(BaseClient):
def _get_path(self, subpath):
return 'events/%s/%s' % (EVENTS_API_VERSION, subpath)
def get_events(self, **options):
return self._call('events', **options)
def get_event(self, event_id, **options):
return self._call('events/%s' % event_id, **options)
def create_event(self, description, create_date, url, event_type, **options):
event_data = {
'description': description,
'createDate': create_date,
'url': url,
'eventType': event_type
}
return self._call('events', params=event_data, method='POST', **options)
|
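A sketch of calling the new method, assuming EventsClient is constructed like the other hapipy clients (the api_key keyword is an assumption about BaseClient, not shown in the source):
from hapi.events import EventsClient
client = EventsClient(api_key='demo')  # constructor signature assumed
events = client.get_events()
event = client.get_event('215')        # new in this commit: GET events/v1/events/215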
2428dcc620fae28e3f7f5ed268ff4bffb96c4501
|
owney/managers.py
|
owney/managers.py
|
from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status__exact='delivered')
|
from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status='delivered')
|
Change filter syntax to be more direct.
|
Change filter syntax to be more direct.
|
Python
|
mit
|
JohnSpeno/owney
|
from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status__exact='delivered')
Change filter syntax to be more direct.
|
from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status='delivered')
|
<commit_before>from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status__exact='delivered')
<commit_msg>Change filter syntax to be more direct.<commit_after>
|
from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status='delivered')
|
from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status__exact='delivered')
Change filter syntax to be more direct.from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status='delivered')
|
<commit_before>from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status__exact='delivered')
<commit_msg>Change filter syntax to be more direct.<commit_after>from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status='delivered')
|
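The two spellings are equivalent: status='delivered' is shorthand for the explicit status__exact='delivered' lookup, so the change is purely stylistic. A sketch of attaching and using the manager (the Shipment model below is illustrative, not from the source):
from django.db import models
from owney.managers import ShipmentManager
class Shipment(models.Model):              # illustrative model
    status = models.CharField(max_length=20)
    objects = ShipmentManager()
Shipment.objects.undelivered()             # compiles to the same SQL either way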
37d6c62e510c591a428d43bc6de8f7346de3781f
|
setmagic/__init__.py
|
setmagic/__init__.py
|
from setmagic.wrapper import SettingsWrapper
# Initialize the magic
settings = SettingsWrapper()
|
from setmagic.wrapper import SettingsWrapper
# Initialize the magic
setmagic = SettingsWrapper()
# Support for backwards compatibility
# @TODO: Drop at 0.4
settings = setmagic
|
Rename the built-in wrapper from "settings" to "setmagic"
|
Rename the built-in wrapper from "settings" to "setmagic"
|
Python
|
mit
|
7ws/django-setmagic
|
from setmagic.wrapper import SettingsWrapper
# Initialize the magic
settings = SettingsWrapper()
Rename the built-in wrapper from "settings" to "setmagic"
|
from setmagic.wrapper import SettingsWrapper
# Initialize the magic
setmagic = SettingsWrapper()
# Support for backwards compatibility
# @TODO: Drop at 0.4
settings = setmagic
|
<commit_before>from setmagic.wrapper import SettingsWrapper
# Initialize the magic
settings = SettingsWrapper()
<commit_msg>Rename the built-in wrapper from "settings" to "setmagic"<commit_after>
|
from setmagic.wrapper import SettingsWrapper
# Initialize the magic
setmagic = SettingsWrapper()
# Support for backwards compatibility
# @TODO: Drop at 0.4
settings = setmagic
|
from setmagic.wrapper import SettingsWrapper
# Initialize the magic
settings = SettingsWrapper()
Rename the built-in wrapper from "settings" to "setmagic"from setmagic.wrapper import SettingsWrapper
# Initialize the magic
setmagic = SettingsWrapper()
# Support for backwards compatibility
# @TODO: Drop at 0.4
settings = setmagic
|
<commit_before>from setmagic.wrapper import SettingsWrapper
# Initialize the magic
settings = SettingsWrapper()
<commit_msg>Rename the built-in wrapper from "settings" to "setmagic"<commit_after>from setmagic.wrapper import SettingsWrapper
# Initialize the magic
setmagic = SettingsWrapper()
# Support for backwards compatibility
# @TODO: Drop at 0.4
settings = setmagic
|
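After the rename, both names refer to the same wrapper instance until the alias is removed; a sketch (attribute delegation by SettingsWrapper is assumed, not shown in the source):
import setmagic
assert setmagic.setmagic is setmagic.settings  # deprecated alias, same object
value = setmagic.setmagic.SOME_SETTING         # lookup behavior assumed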
7eb71da0822cdf6ea724a87662952fe90e65a6f6
|
UM/Operations/ScaleOperation.py
|
UM/Operations/ScaleOperation.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._add_scale = kwargs.get("add_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
elif self._add_scale:
self._node.setScale(self._node.getScale() + self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
if other._add_scale and not self._add_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
|
Convert ScaleTool to use add_scale
|
Convert ScaleTool to use add_scale
Contributes to Ultimaker/Uranium/#73
Contributes to Ultimaker/Cura/#493
contributes to #CURA-287
contributes to #CURA-235
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
Convert ScaleTool to use add_scale
Contributes to Ultimaker/Uranium/#73
Contributes to Ultimaker/Cura/#493
contributes to #CURA-287
contributes to #CURA-235
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._add_scale = kwargs.get("add_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
elif self._add_scale:
self._node.setScale(self._node.getScale() + self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
if other._add_scale and not self._add_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
<commit_msg>Convert ScaleTool to use add_scale
Contributes to Ultimaker/Uranium/#73
Contributes to Ultimaker/Cura/#493
contributes to #CURA-287
contributes to #CURA-235<commit_after>
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._add_scale = kwargs.get("add_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
elif self._add_scale:
self._node.setScale(self._node.getScale() + self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
if other._add_scale and not self._add_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
Convert ScaleTool to use add_scale
Contributes to Ultimaker/Uranium/#73
Contributes to Ultimaker/Cura/#493
contributes to #CURA-287
contributes to #CURA-235# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._add_scale = kwargs.get("add_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
elif self._add_scale:
self._node.setScale(self._node.getScale() + self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
if other._add_scale and not self._add_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
<commit_msg>Convert ScaleTool to use add_scale
Contributes to Ultimaker/Uranium/#73
Contributes to Ultimaker/Cura/#493
contributes to #CURA-287
contributes to #CURA-235<commit_after># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
from UM.Scene.SceneNode import SceneNode
class ScaleOperation(Operation.Operation):
def __init__(self, node, scale, **kwargs):
super().__init__()
self._node = node
self._old_scale = node.getScale()
self._set_scale = kwargs.get("set_scale", False)
self._add_scale = kwargs.get("add_scale", False)
self._scale = scale
def undo(self):
self._node.setScale(self._old_scale)
def redo(self):
if self._set_scale:
self._node.setScale(self._scale)
elif self._add_scale:
self._node.setScale(self._node.getScale() + self._scale)
else:
self._node.scale(self._scale, SceneNode.TransformSpace.World)
def mergeWith(self, other):
if type(other) is not ScaleOperation:
return False
if other._node != self._node:
return False
if other._set_scale and not self._set_scale:
return False
if other._add_scale and not self._add_scale:
return False
op = ScaleOperation(self._node, self._scale)
op._old_scale = other._old_scale
return op
def __repr__(self):
return "ScaleOperation(node = {0}, scale={1})".format(self._node, self._scale)
|
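A sketch of the three scaling modes after this change, given some existing SceneNode `node` (the Vector import path and Operation.push() are assumptions about Uranium's API):
from UM.Math.Vector import Vector   # import path assumed
from UM.Operations.ScaleOperation import ScaleOperation
delta = Vector(0.1, 0.1, 0.1)
ScaleOperation(node, delta).push()                   # relative: node.scale(delta)
ScaleOperation(node, delta, set_scale=True).push()   # absolute: setScale(delta)
ScaleOperation(node, delta, add_scale=True).push()   # new: setScale(getScale() + delta)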
920c1cd03645bd04df59bdb1f52aab07c710746b
|
fabtools/__init__.py
|
fabtools/__init__.py
|
# Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
|
# Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.disk
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
|
Add missing import for new disk module
|
Add missing import for new disk module
|
Python
|
bsd-2-clause
|
ahnjungho/fabtools,badele/fabtools,wagigi/fabtools-python,fabtools/fabtools,davidcaste/fabtools,AMOSoft/fabtools,prologic/fabtools,ronnix/fabtools,n0n0x/fabtools-python,pombredanne/fabtools,sociateru/fabtools,hagai26/fabtools,bitmonk/fabtools
|
# Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
Add missing import for new disk module
|
# Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.disk
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
|
<commit_before># Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
<commit_msg>Add missing import for new disk module<commit_after>
|
# Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.disk
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
|
# Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
Add missing import for new disk module# Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.disk
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
|
<commit_before># Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
<commit_msg>Add missing import for new disk module<commit_after># Keep imports sorted alphabetically
import fabtools.arch
import fabtools.cron
import fabtools.deb
import fabtools.disk
import fabtools.files
import fabtools.git
import fabtools.group
import fabtools.mysql
import fabtools.network
import fabtools.nginx
import fabtools.nodejs
import fabtools.openvz
import fabtools.pkg
import fabtools.postgres
import fabtools.python
import fabtools.python_distribute
import fabtools.rpm
import fabtools.service
import fabtools.shorewall
import fabtools.ssh
import fabtools.supervisor
import fabtools.system
import fabtools.user
import fabtools.require
icanhaz = require
|
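Without the explicit import, the submodule is only reachable if something else happens to import it first; a quick sketch of the difference:
import fabtools
# After this commit, importing the package binds the submodule attribute:
assert hasattr(fabtools, 'disk')
# Before it, the same check failed unless some other module had
# already executed `import fabtools.disk`.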
7d28400cc11fec86f542f1a0b03df6b6ed0086ea
|
dipy/stats/__init__.py
|
dipy/stats/__init__.py
|
# code support tractometric statistical analysis for dipy
|
# code support tractometric statistical analysis for dipy
import warnings
w_string = "The `dipy.stats` module is still under heavy development "
w_string += "and functionality, as well as the API is likely to change "
w_string += "in future versions of the software"
warnings.warn(w_string)
|
Add a warning about future changes that will happen in dipy.stats.
|
Add a warning about future changes that will happen in dipy.stats.
|
Python
|
bsd-3-clause
|
FrancoisRheaultUS/dipy,FrancoisRheaultUS/dipy
|
# code support tractometric statistical analysis for dipy
Add a warning about future changes that will happen in dipy.stats.
|
# code support tractometric statistical analysis for dipy
import warnings
w_string = "The `dipy.stats` module is still under heavy development "
w_string += "and functionality, as well as the API is likely to change "
w_string += "in future versions of the software"
warnings.warn(w_string)
|
<commit_before># code support tractometric statistical analysis for dipy
<commit_msg>Add a warning about future changes that will happen in dipy.stats.<commit_after>
|
# code support tractometric statistical analysis for dipy
import warnings
w_string = "The `dipy.stats` module is still under heavy development "
w_string += "and functionality, as well as the API is likely to change "
w_string += "in future versions of the software"
warnings.warn(w_string)
|
# code support tractometric statistical analysis for dipy
Add a warning about future changes that will happen in dipy.stats.# code support tractometric statistical analysis for dipy
import warnings
w_string = "The `dipy.stats` module is still under heavy development "
w_string += "and functionality, as well as the API is likely to change "
w_string += "in future versions of the software"
warnings.warn(w_string)
|
<commit_before># code support tractometric statistical analysis for dipy
<commit_msg>Add a warning about future changes that will happen in dipy.stats.<commit_after># code support tractometric statistical analysis for dipy
import warnings
w_string = "The `dipy.stats` module is still under heavy development "
w_string += "and functionality, as well as the API is likely to change "
w_string += "in future versions of the software"
warnings.warn(w_string)
|
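The warning fires when the module body runs; a sketch of observing it with only the standard library (assumes dipy.stats has not been imported earlier in the process, since module bodies execute only once):
import warnings
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    import dipy.stats  # the module-level warnings.warn runs here
assert any("heavy development" in str(w.message) for w in caught)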
887c90bbe82fb0ddc85af0a2a9a294bd38677bda
|
test/lib/test_util.py
|
test/lib/test_util.py
|
import unittest
import amara
from amara.lib import util
class Test_util(unittest.TestCase):
def test_trim_word_count(self):
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests")
|
import unittest
import amara
from amara.lib import util
class Test_trim_word_count(unittest.TestCase):
'Testing amara.lib.util.trim_word_count'
def test_flat_doc(self):
'Input doc with just top-level text'
x = amara.parse('<a>one two three four five six seven eight nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
def test_nested_doc(self):
'Input doc with text in nested elements'
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests (nosetests path/to/test/file)")
|
Expand trim_word_count test a bit
|
Expand trim_word_count test a bit
|
Python
|
apache-2.0
|
zepheira/amara,zepheira/amara,zepheira/amara,zepheira/amara,zepheira/amara,zepheira/amara
|
import unittest
import amara
from amara.lib import util
class Test_util(unittest.TestCase):
def test_trim_word_count(self):
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests")
Expand trim_word_count test a bit
|
import unittest
import amara
from amara.lib import util
class Test_trim_word_count(unittest.TestCase):
'Testing amara.lib.util.trim_word_count'
def test_flat_doc(self):
'Input doc with just top-level text'
x = amara.parse('<a>one two three four five six seven eight nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
def test_nested_doc(self):
'Input doc with text in nested elements'
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests (nosetests path/to/test/file)")
|
<commit_before>
import unittest
import amara
from amara.lib import util
class Test_util(unittest.TestCase):
def test_trim_word_count(self):
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests")
<commit_msg>Expand trim_word_count test a bit<commit_after>
|
import unittest
import amara
from amara.lib import util
class Test_trim_word_count(unittest.TestCase):
'Testing amara.lib.util.trim_word_count'
def test_flat_doc(self):
'Input doc with just top-level text'
x = amara.parse('<a>one two three four five six seven eight nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
def test_nested_doc(self):
'Input doc with text in nested elements'
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests (nosetests path/to/test/file)")
|
import unittest
import amara
from amara.lib import util
class Test_util(unittest.TestCase):
def test_trim_word_count(self):
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests")
Expand trim_word_count test a bitimport unittest
import amara
from amara.lib import util
class Test_trim_word_count(unittest.TestCase):
'Testing amara.lib.util.trim_word_count'
def test_flat_doc(self):
'Input doc with just top-level text'
x = amara.parse('<a>one two three four five six seven eight nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
def test_nested_doc(self):
'Input doc with text in nested elements'
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests (nosetests path/to/test/file)")
|
<commit_before>
import unittest
import amara
from amara.lib import util
class Test_util(unittest.TestCase):
def test_trim_word_count(self):
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests")
<commit_msg>Expand trim_word_count test a bit<commit_after>import unittest
import amara
from amara.lib import util
class Test_trim_word_count(unittest.TestCase):
'Testing amara.lib.util.trim_word_count'
def test_flat_doc(self):
'Input doc with just top-level text'
x = amara.parse('<a>one two three four five six seven eight nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
def test_nested_doc(self):
'Input doc with text in nested elements'
x = amara.parse('<a>one two <b>three four </b><c>five <d>six seven</d> eight</c> nine</a>')
for i in range(1, 11):
trimmed_tree = util.trim_word_count(x, i)
word_count = len(trimmed_tree.xml_select(u'string(.)').split())
self.assertEquals(word_count, min(i, 9))
if __name__ == '__main__':
raise SystemExit("Use nosetests (nosetests path/to/test/file)")
|
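Both tests assert the same invariant: trimming to i words yields min(i, 9) words whether the text is flat or spread across nested elements. A standalone sketch of the flat case, using only the API exercised above:
import amara
from amara.lib import util
doc = amara.parse('<a>one two three four five six seven eight nine</a>')
trimmed = util.trim_word_count(doc, 4)
assert len(trimmed.xml_select(u'string(.)').split()) == 4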
19fd2795e1cd909bb969a4c4e514d8cb1fd884f5
|
plugins/XmlMaterialProfile/__init__.py
|
plugins/XmlMaterialProfile/__init__.py
|
# Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
# Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
Mark XmlMaterialProfile as type "material" so the import/export code can find it
|
Mark XmlMaterialProfile as type "material" so the import/export code can find it
Contributes to CURA-341
|
Python
|
agpl-3.0
|
senttech/Cura,fieldOfView/Cura,totalretribution/Cura,hmflash/Cura,Curahelper/Cura,totalretribution/Cura,fieldOfView/Cura,hmflash/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,senttech/Cura,Curahelper/Cura
|
# Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
Mark XmlMaterialProfile as type "material" so the import/export code can find it
Contributes to CURA-341
|
# Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
<commit_before># Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
<commit_msg>Mark XmlMaterialProfile as type "material" so the import/export code can find it
Contributes to CURA-341<commit_after>
|
# Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
# Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
Mark XmlMaterialProfile as type "material" so the import/export code can find it
Contributes to CURA-341# Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
<commit_before># Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
<commit_msg>Mark XmlMaterialProfile as type "material" so the import/export code can find it
Contributes to CURA-341<commit_after># Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import XmlMaterialProfile
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Material Profiles"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides capabilities to read and write XML-based material profiles."),
"api": 3
},
"settings_container": {
"type": "material",
"mimetype": "application/x-ultimaker-material-profile"
}
}
def register(app):
mime_type = MimeType(
name = "application/x-ultimaker-material-profile",
comment = "Ultimaker Material Profile",
suffixes = [ "xml.fdm_material" ]
)
MimeTypeDatabase.addMimeType(mime_type)
return { "settings_container": XmlMaterialProfile.XmlMaterialProfile("default_xml_material_profile") }
|
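A sketch of why the added "type" key matters: lookup code can now filter plugin metadata for material containers. The function below is illustrative only, not Cura's actual registry code:
def find_material_plugins(all_metadata):
    # all_metadata: mapping of plugin id -> dict returned by getMetaData()
    return [
        plugin_id
        for plugin_id, meta in all_metadata.items()
        if meta.get("settings_container", {}).get("type") == "material"
    ]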
79c38342193a1ae9a2f12e4b45ccc30cda212c18
|
indico/modules/events/papers/settings.py
|
indico/modules/events/papers/settings.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'enforce_deadlines': False,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
|
Add setting to optionally enforce PR deadlines
|
Add setting to optionally enforce PR deadlines
|
Python
|
mit
|
OmeGak/indico,ThiefMaster/indico,OmeGak/indico,ThiefMaster/indico,pferreir/indico,pferreir/indico,indico/indico,pferreir/indico,OmeGak/indico,indico/indico,ThiefMaster/indico,mvidalgarcia/indico,DirkHoffmann/indico,pferreir/indico,mic4ael/indico,mic4ael/indico,mvidalgarcia/indico,mic4ael/indico,ThiefMaster/indico,DirkHoffmann/indico,mvidalgarcia/indico,indico/indico,mic4ael/indico,mvidalgarcia/indico,OmeGak/indico,DirkHoffmann/indico,DirkHoffmann/indico,indico/indico
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
Add setting to optionally enforce PR deadlines
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'enforce_deadlines': False,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
<commit_msg>Add setting to optionally enforce PR deadlines<commit_after>
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'enforce_deadlines': False,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
Add setting to optionally enforce PR deadlines
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'enforce_deadlines': False,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
<commit_msg>Add setting to optionally enforce PR deadlines<commit_after># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.settings.converters import DatetimeConverter
from indico.modules.events.settings import EventSettingsProxy
paper_reviewing_settings = EventSettingsProxy('paper_reviewing', {
'start_dt': None,
'end_dt': None,
'enforce_deadlines': False,
'content_reviewing_enabled': True,
'layout_reviewing_enabled': False,
'judge_deadline': None,
'layout_reviewer_deadline': None,
'content_reviewer_deadline': None,
}, converters={
'start_dt': DatetimeConverter,
'end_dt': DatetimeConverter,
'judge_deadline': DatetimeConverter,
'layout_reviewer_deadline': DatetimeConverter,
'content_reviewer_deadline': DatetimeConverter,
})
|
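The DatetimeConverter entries in the record above exist because event settings are persisted as JSON-friendly values, so datetime objects must be serialized on write and revived on read. The following is a minimal, self-contained sketch of that converter idea; it is a toy stand-in rather than Indico's actual SettingsProxy code, and the names ToySettingsProxy, to_storage, and from_storage are illustrative assumptions, not Indico API.
from datetime import datetime
class ToyDatetimeConverter:
    # Assumption for illustration: datetimes round-trip as ISO-8601 strings.
    @staticmethod
    def to_storage(value):
        return value.isoformat() if value is not None else None
    @staticmethod
    def from_storage(value):
        return datetime.fromisoformat(value) if value is not None else None
class ToySettingsProxy:
    # Dict-backed stand-in that applies per-key converters on get/set.
    def __init__(self, defaults, converters=None):
        self._store = dict(defaults)
        self._converters = converters or {}
    def set(self, name, value):
        conv = self._converters.get(name)
        self._store[name] = conv.to_storage(value) if conv else value
    def get(self, name):
        conv = self._converters.get(name)
        value = self._store[name]
        return conv.from_storage(value) if conv else value
settings = ToySettingsProxy(
    {'judge_deadline': None, 'enforce_deadlines': False},
    converters={'judge_deadline': ToyDatetimeConverter},
)
settings.set('judge_deadline', datetime(2016, 12, 1, 12, 0))
assert isinstance(settings._store['judge_deadline'], str)  # stored serialized
assert settings.get('judge_deadline') == datetime(2016, 12, 1, 12, 0)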
11a19916c084b5ceee32180988eee9c2e1ebff05
|
django_payzen/admin.py
|
django_payzen/admin.py
|
from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
|
from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
|
Reorder list display in RequestPayment and RequestResponse lists.
|
Reorder list display in RequestPayment and RequestResponse lists.
|
Python
|
mit
|
zehome/django-payzen,bsvetchine/django-payzen,zehome/django-payzen,bsvetchine/django-payzen
|
from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
Reorder list display in RequestPayment and RequestResponse lists.
|
from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
|
<commit_before>from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
<commit_msg>Reorder list display in RequestPayment and RequestResponse lists.<commit_after>
|
from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
|
from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
Reorder list display in RequestPayment and RequestResponse lists.
from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
|
<commit_before>from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_amount", "get_vads_currency_display",
"vads_trans_id", "vads_trans_date",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
<commit_msg>Reorder list display in RequestPayment and RequestResponse lists.<commit_after>from django.contrib import admin
from . import models
class PaymentRequestAdmin(admin.ModelAdmin):
model = models.PaymentRequest
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display")
class PaymentResponseAdmin(admin.ModelAdmin):
model = models.PaymentResponse
list_display = ("vads_trans_id", "vads_trans_date",
"vads_amount", "get_vads_currency_display",
"vads_operation_type")
admin.site.register(models.PaymentRequest, PaymentRequestAdmin)
admin.site.register(models.PaymentResponse, PaymentResponseAdmin)
|
565ff4653b0dca4bb4831d263dae118d044b6b9c
|
test/test_molecule.py
|
test/test_molecule.py
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
Fix broken requirements file references
|
Fix broken requirements file references
|
Python
|
mit
|
nephelaiio/cookiecutter-ansible-role
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
Fix broken requirements file references
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
<commit_before>import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
<commit_msg>Fix broken requirements file references<commit_after>
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
Fix broken requirements file references
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
<commit_before>import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
<commit_msg>Fix broken requirements file references<commit_after>import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
fc4b36c34f8f9edbb688ff4d5ab1d50b4f8c6dac
|
armstrong/core/arm_layout/utils.py
|
armstrong/core/arm_layout/utils.py
|
import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "{} is deprecated and will be removed in ArmLayout 1.4. Use {}.".format(OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `{}` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg.format(func.__name__), DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
|
import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "%s is deprecated and will be removed in ArmLayout 1.4. Use %s." % (OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `%s` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg % func.__name__, DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
|
Use older style string formatting for Python 2.6
|
Use older style string formatting for Python 2.6
|
Python
|
apache-2.0
|
armstrong/armstrong.core.arm_layout,armstrong/armstrong.core.arm_layout
|
import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "{} is deprecated and will be removed in ArmLayout 1.4. Use {}.".format(OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `{}` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg.format(func.__name__), DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
Use older style string formatting for Python 2.6
|
import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "%s is deprecated and will be removed in ArmLayout 1.4. Use %s." % (OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `%s` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg % func.__name__, DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
|
<commit_before>import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "{} is deprecated and will be removed in ArmLayout 1.4. Use {}.".format(OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `{}` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg.format(func.__name__), DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
<commit_msg>Use older style string formatting for Python 2.6<commit_after>
|
import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "%s is deprecated and will be removed in ArmLayout 1.4. Use %s." % (OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `%s` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg % func.__name__, DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
|
import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "{} is deprecated and will be removed in ArmLayout 1.4. Use {}.".format(OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `{}` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg.format(func.__name__), DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
Use older style string formatting for Python 2.6
import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "%s is deprecated and will be removed in ArmLayout 1.4. Use %s." % (OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `%s` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg % func.__name__, DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
|
<commit_before>import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "{} is deprecated and will be removed in ArmLayout 1.4. Use {}.".format(OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `{}` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg.format(func.__name__), DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
<commit_msg>Use older style string formatting for Python 2.6<commit_after>import warnings
from django.conf import settings
from armstrong.utils.backends import GenericBackend
NEW = "ARMSTRONG_LAYOUT_BACKEND"
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
render_model = (GenericBackend(NEW,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
if hasattr(settings, OLD):
msg = "%s is deprecated and will be removed in ArmLayout 1.4. Use %s." % (OLD, NEW)
warnings.warn(msg, DeprecationWarning)
render_model = (GenericBackend(OLD,
defaults="armstrong.core.arm_layout.backends.BasicLayoutBackend")
.get_backend())
# DEPRECATED: To be removed in ArmLayout 1.4. Here for backwards compatibility
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
def deprecate(func):
def wrapper(*args, **kwargs):
msg = "Importing `%s` from this module is deprecated and will be removed in ArmLayout 1.4"
warnings.warn(msg % func.__name__, DeprecationWarning)
return func(*args, **kwargs)
return wrapper
mark_safe = deprecate(mark_safe)
render_to_string = deprecate(render_to_string)
get_layout_template_name = deprecate(render_model.get_layout_template_name)
|
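The rewrite above is driven by a concrete incompatibility: auto-numbered replacement fields ('{}') were only added to str.format in Python 2.7, so on Python 2.6 they raise ValueError, while %-style formatting works on every version the package supported. A small runnable illustration of the equivalent spellings (executes on Python 3 as well):
OLD = "ARMSTRONG_RENDER_MODEL_BACKEND"
NEW = "ARMSTRONG_LAYOUT_BACKEND"
# Portable back to Python 2.6: %-style formatting.
msg_percent = "%s is deprecated. Use %s." % (OLD, NEW)
# Explicitly numbered fields also work on 2.6; only the bare '{}' form
# fails there with ValueError: zero length field name in format.
msg_numbered = "{0} is deprecated. Use {1}.".format(OLD, NEW)
assert msg_percent == msg_numbered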
877f59134c64f3c2e50436289b1cd676d471f66f
|
src/gramcore/features/tests/test_descriptors.py
|
src/gramcore/features/tests/test_descriptors.py
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
Add note in hog test doc string
|
Add note in hog test doc string
|
Python
|
mit
|
cpsaltis/pythogram-core
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
Add note in hog test doc string
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
<commit_before>"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
<commit_msg>Add note in hog test doc string<commit_after>
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
Add note in hog test doc string
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
<commit_before>"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
<commit_msg>Add note in hog test doc string<commit_after>"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
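The size assertion in the record above can be verified by hand: the fixture is 2 * 9 * 8 = 144 pixels per side, which holds 144 / 9 = 16 cells along each axis and therefore 16 - 8 + 1 = 9 sliding block positions per axis, giving 9^2 * 8^2 * 9 = 46656 feature values. A standalone recomputation of that arithmetic (plain Python, no skimage required):
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
arr_dim = 2 * pixels_per_cell * cells_per_block         # 144 pixels per side
cells_per_axis = arr_dim // pixels_per_cell             # 16 cells along each axis
block_positions = cells_per_axis - cells_per_block + 1  # 9 window positions per axis
expected = block_positions ** 2 * cells_per_block ** 2 * orientations
print(expected)  # 46656, the length the test expects from descriptors.hog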
e77cb240d522da47208b60384c40f03f5c9182e3
|
tests/test_encoder.py
|
tests/test_encoder.py
|
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
|
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
def test_cube_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.IsisCubeLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
def test_pds_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.PDSLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
|
Add tests for cube and isis encoders.
|
Add tests for cube and isis encoders.
|
Python
|
bsd-3-clause
|
pbvarga1/pvl,bvnayak/pvl,wtolson/pvl,planetarypy/pvl
|
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
Add tests for cube and isis encoders.
|
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
def test_cube_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.IsisCubeLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
def test_pds_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.PDSLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
|
<commit_before># -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
<commit_msg>Add tests for cube and isis encoders.<commit_after>
|
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
def test_cube_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.IsisCubeLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
def test_pds_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.PDSLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
|
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
Add tests for cube and isis encoders.
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
def test_cube_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.IsisCubeLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
def test_pds_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.PDSLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
|
<commit_before># -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
<commit_msg>Add tests for cube and isis encoders.<commit_after># -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
def test_cube_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.IsisCubeLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
def test_pds_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.PDSLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
|
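The three tests in the record above differ only in which encoder class is passed to pvl.dumps, so they collapse naturally into one parametrized round-trip. A sketch assuming pytest is available alongside the pvl API used in the record; this consolidation is illustrative, not part of the commit.

# Hypothetical consolidation of the round-trip tests above; assumes the
# pvl API shown in the record plus pytest. Not part of the original commit.
import os
import glob
import pytest
import pvl
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
@pytest.mark.parametrize('encoder', [
    None,                              # default encoder
    pvl.encoder.IsisCubeLabelEncoder,  # ISIS cube labels
    pvl.encoder.PDSLabelEncoder,       # PDS3 labels
])
def test_roundtrip(encoder):
    for infile in glob.glob(os.path.join(PDS_DATA_DIR, '*.lbl')):
        label = pvl.load(infile)
        dumped = pvl.dumps(label) if encoder is None else pvl.dumps(label, cls=encoder)
        assert label == pvl.loads(dumped)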
710a2a6d9c462041bae6c41f0578d99262c6a861
|
tests/test_execute.py
|
tests/test_execute.py
|
import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
|
import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
async def test_execute_exceptions_1(self):
with self.assertRaisesRegex(asyncpg.Error,
'relation "__dne__" does not exist'):
await self.con.execute('select * from __dne__')
|
Test that con.execute() propagates Postgres exceptions
|
Test that con.execute() propagates Postgres exceptions
|
Python
|
apache-2.0
|
MagicStack/asyncpg,MagicStack/asyncpg
|
import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
Test that con.execute() propagates Postgres exceptions
|
import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
async def test_execute_exceptions_1(self):
with self.assertRaisesRegex(asyncpg.Error,
'relation "__dne__" does not exist'):
await self.con.execute('select * from __dne__')
|
<commit_before>import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
<commit_msg>Test that con.execute() propagates Postgres exceptions<commit_after>
|
import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
async def test_execute_exceptions_1(self):
with self.assertRaisesRegex(asyncpg.Error,
'relation "__dne__" does not exist'):
await self.con.execute('select * from __dne__')
|
import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
Test that con.execute() propagates Postgres exceptions
import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
async def test_execute_exceptions_1(self):
with self.assertRaisesRegex(asyncpg.Error,
'relation "__dne__" does not exist'):
await self.con.execute('select * from __dne__')
|
<commit_before>import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
<commit_msg>Test that con.execute() propagates Postgres exceptions<commit_after>import asyncpg
from asyncpg import _testbase as tb
class TestExecuteScript(tb.ConnectedTestCase):
async def test_execute_script_1(self):
r = await self.con.execute('''
SELECT 1;
SELECT true FROM pg_type WHERE false = true;
SELECT 2;
''')
self.assertIsNone(r)
async def test_execute_script_check_transactionality(self):
with self.assertRaises(asyncpg.Error):
await self.con.execute('''
CREATE TABLE mytab (a int);
SELECT * FROM mytab WHERE 1 / 0 = 1;
''')
with self.assertRaisesRegex(asyncpg.Error, '"mytab" does not exist'):
await self.con.prepare('''
SELECT * FROM mytab
''')
async def test_execute_exceptions_1(self):
with self.assertRaisesRegex(asyncpg.Error,
'relation "__dne__" does not exist'):
await self.con.execute('select * from __dne__')
|
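The added test covers propagation through execute() only. Below is a hypothetical companion test, assuming con.fetch() surfaces server errors the same way; the record itself only demonstrates execute and prepare, so this is an illustration rather than part of the commit.

# Hypothetical companion test; assumes con.fetch() surfaces server errors
# the same way con.execute() does. Illustration only, not in the commit.
import asyncpg
from asyncpg import _testbase as tb
class TestFetchErrors(tb.ConnectedTestCase):
    async def test_fetch_exceptions_1(self):
        with self.assertRaisesRegex(asyncpg.Error,
                                    'relation "__dne__" does not exist'):
            await self.con.fetch('select * from __dne__')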
d95d817bdb1fba7eb0ce0cdabcd64a9908796d2a
|
tests/unit/test_ls.py
|
tests/unit/test_ls.py
|
from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line("globus ls -r -F json {}:/".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "share/godata/file1.txt"', output)
|
from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line(
"globus ls -r -F json {}:/share".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "godata/file1.txt"', output)
|
Fix concurrency bug in ls tests
|
Fix concurrency bug in ls tests
|
Python
|
apache-2.0
|
globus/globus-cli,globus/globus-cli
|
from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line("globus ls -r -F json {}:/".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "share/godata/file1.txt"', output)
Fix concurrency bug in ls tests
|
from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line(
"globus ls -r -F json {}:/share".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "godata/file1.txt"', output)
|
<commit_before>from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line("globus ls -r -F json {}:/".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "share/godata/file1.txt"', output)
<commit_msg>Fix concurrency bug in ls tests<commit_after>
|
from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line(
"globus ls -r -F json {}:/share".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "godata/file1.txt"', output)
|
from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line("globus ls -r -F json {}:/".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "share/godata/file1.txt"', output)
Fix concurrency bug in ls tests
from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line(
"globus ls -r -F json {}:/share".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "godata/file1.txt"', output)
|
<commit_before>from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line("globus ls -r -F json {}:/".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "share/godata/file1.txt"', output)
<commit_msg>Fix concurrency bug in ls tests<commit_after>from tests.framework.cli_testcase import CliTestCase
from tests.framework.constants import GO_EP1_ID
class LsTests(CliTestCase):
"""
Tests globus ls command
"""
def test_path(self):
"""
Does an ls on EP1:/, confirms expected results.
"""
path = "/"
output = self.run_line("globus ls {}:{}".format(GO_EP1_ID, path))
expected = ["home/", "mnt/", "not shareable/", "share/"]
for item in expected:
self.assertIn(item, output)
def test_recursive(self):
"""
Confirms --recursive ls on EP1:/share/ finds file1.txt
"""
output = self.run_line("globus ls -r {}:/share/".format(GO_EP1_ID))
self.assertIn("file1.txt", output)
def test_depth(self):
"""
Confirms setting depth to 1 on a --recursive ls of EP1:/
finds godata but not file1.txt
"""
output = self.run_line(("globus ls -r --recursive-depth-limit 1 {}:/"
.format(GO_EP1_ID)))
self.assertNotIn("file1.txt", output)
def test_recursive_json(self):
"""
Confirms -F json works with the RecursiveLsResponse
"""
output = self.run_line(
"globus ls -r -F json {}:/share".format(GO_EP1_ID))
self.assertIn('"DATA":', output)
self.assertIn('"name": "godata/file1.txt"', output)
|
71e2bc7976dcb4230c20e2c60a7e23634c38603f
|
apps/references/autoref.py
|
apps/references/autoref.py
|
import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Publication] : "3000"[Date - Publication])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
|
import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Epub] : "3000"[Date - Epub])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
|
Change publication date to Epub; more up-to-date
|
Change publication date to Epub; more up-to-date
|
Python
|
bsd-3-clause
|
mfitzp/django-golifescience
|
import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Publication] : "3000"[Date - Publication])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
Change publication date to Epub; more up-to-date
|
import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Epub] : "3000"[Date - Epub])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
|
<commit_before>import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Publication] : "3000"[Date - Publication])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
<commit_msg>Change publication date to Epub; more up-to-date<commit_after>
|
import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Epub] : "3000"[Date - Epub])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
|
import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Publication] : "3000"[Date - Publication])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
Change publication date to Epub; more up-to-date
import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Epub] : "3000"[Date - Epub])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
|
<commit_before>import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Publication] : "3000"[Date - Publication])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
<commit_msg>Change publication date to Epub; more up-to-date<commit_after>import os.path
import string
import urllib, re
from datetime import datetime
from xml.dom.minidom import parse, parseString
# Django
from django.core import serializers
from django.conf import settings
from django.db import models
# Methodmint
def pubmed(keywords, latest_query=None):
# Get matching publications from Pubmed service
# We explode the keywords append [TW] for all text-search
# then build a string for the datetime since last update
keywordl = keywords.split(',')
keywordq = '(' + '[TW] '.join(keywordl) + '[TW])' # produce[TW] this[TW] string[TW]
if latest_query == None:
timeq = ''
else:
timeq = ' AND ("%s"[Date - Epub] : "3000"[Date - Epub])' % latest_query.strftime("%Y/%m/%d")
print "Querying pubmed with: %s %s" % (keywordq, timeq)
f = urllib.urlopen("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=%s %s" % (keywordq, timeq))
# Build DOM for requested data
dom = parse(f)
f.close()
uris = []
if dom:
if dom.getElementsByTagName('Id'):
for item in dom.getElementsByTagName('Id'):
uris.append( 'pmid:%d' % int( item.childNodes[0].data ) )
uris = uris[:25] # Limit max number of subsequent requests
return uris
|
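Note that the record's query string is interpolated straight into the URL without escaping, so spaces and quotes reach the E-utilities endpoint raw. A minimal Python 3 sketch of the same esearch call with proper encoding, standard library only; the db and term parameters match the record, while the function name and limit argument are illustrative.

# Hypothetical Python 3 rework of the query above with URL encoding;
# standard library only, not part of the original commit.
from urllib.parse import urlencode
from urllib.request import urlopen
from xml.etree import ElementTree
def pubmed_ids(keywords, latest_query=None, limit=25):
    # Same [TW] all-text tags and Epub date window as the record.
    terms = '(' + '[TW] '.join(keywords.split(',')) + '[TW])'
    if latest_query is not None:
        terms += (' AND ("%s"[Date - Epub] : "3000"[Date - Epub])'
                  % latest_query.strftime("%Y/%m/%d"))
    url = ('http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?'
           + urlencode({'db': 'pubmed', 'term': terms}))
    with urlopen(url) as f:
        tree = ElementTree.parse(f)
    # Cap the result list to limit subsequent requests, as the record does.
    return ['pmid:%d' % int(e.text) for e in tree.iter('Id')][:limit]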
d410a5295b67b17ca1cdc4d53ed8f776159278bc
|
json2parquet/__init__.py
|
json2parquet/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
Make client.write_parquet_dataset available for export
|
Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `
|
Python
|
mit
|
andrewgross/json2parquet
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
<commit_msg>Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
<commit_msg>Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
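The commit message above already names the observable effect; for completeness, here is the import that raised ImportError before this change and resolves after it. Only the import is shown, since the record does not include write_parquet_dataset's signature.

# Before this commit: ImportError. After it, the name resolves, since it
# is both imported from .client and listed in __all__.
from json2parquet import write_parquet_dataset
import json2parquet
assert 'write_parquet_dataset' in json2parquet.__all__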
d1a052237dc6fc5a8a198a130c203e823e86ccec
|
dist/docker/redhat/commandlineparser.py
|
dist/docker/redhat/commandlineparser.py
|
import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisined environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
|
import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisioned environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
|
Fix typo in "--overprovisioned" help text
|
dist/docker: Fix typo in "--overprovisioned" help text
Reported by Mathias Bogaert (@analytically).
Message-Id: <13c4d4f57d8c59965d44b353c9e1b869295d4df3@scylladb.com>
|
Python
|
agpl-3.0
|
avikivity/scylla,raphaelsc/scylla,avikivity/scylla,scylladb/scylla,scylladb/scylla,duarten/scylla,duarten/scylla,duarten/scylla,raphaelsc/scylla,avikivity/scylla,scylladb/scylla,scylladb/scylla,raphaelsc/scylla
|
import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisined environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
dist/docker: Fix typo in "--overprovisioned" help text
Reported by Mathias Bogaert (@analytically).
Message-Id: <13c4d4f57d8c59965d44b353c9e1b869295d4df3@scylladb.com>
|
import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisioned environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
|
<commit_before>import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisined environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
<commit_msg>dist/docker: Fix typo in "--overprovisioned" help text
Reported by Mathias Bogaert (@analytically).
Message-Id: <13c4d4f57d8c59965d44b353c9e1b869295d4df3@scylladb.com><commit_after>
|
import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisioned environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
|
import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisined environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
dist/docker: Fix typo in "--overprovisioned" help text
Reported by Mathias Bogaert (@analytically).
Message-Id: <13c4d4f57d8c59965d44b353c9e1b869295d4df3@scylladb.com>
import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisioned environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
|
<commit_before>import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisined environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
<commit_msg>dist/docker: Fix typo in "--overprovisioned" help text
Reported by Mathias Bogaert (@analytically).
Message-Id: <13c4d4f57d8c59965d44b353c9e1b869295d4df3@scylladb.com><commit_after>import argparse
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--developer-mode', default='1', choices=['0', '1'], dest='developerMode')
parser.add_argument('--seeds', default=None, help="specify seeds - if left empty will use container's own IP")
parser.add_argument('--cpuset', default=None, help="e.g. --cpuset 0-3 for the first four CPUs")
parser.add_argument('--smp', default=None, help="e.g --smp 2 to use two CPUs")
parser.add_argument('--memory', default=None, help="e.g. --memory 1G to use 1 GB of RAM")
parser.add_argument('--overprovisioned', default='0', choices=['0', '1'], help="run in overprovisioned environment")
parser.add_argument('--broadcast-address', default=None, dest='broadcastAddress')
parser.add_argument('--broadcast-rpc-address', default=None, dest='broadcastRpcAddress')
return parser.parse_args()
|
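Note that parse() calls parse_args() with no argument list, so it always reads sys.argv. A hypothetical invocation sketch using the dest names defined above; the module import path and argv values are illustrative.

# Hypothetical invocation; assumes the module above is importable as
# commandlineparser. parse() takes no argv parameter, so sys.argv is patched.
import sys
import commandlineparser
sys.argv = ['scylla', '--smp', '2', '--memory', '1G', '--overprovisioned', '1']
args = commandlineparser.parse()
print(args.smp, args.memory, args.overprovisioned)  # 2 1G 1
print(args.developerMode, args.broadcastAddress)    # 1 None (defaults)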
3d570864a39f10d6e502e4005e7931793fca3d01
|
flask_app/models.py
|
flask_app/models.py
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
Add missing cascade deletes on user/roles
|
Add missing cascade deletes on user/roles
|
Python
|
mit
|
getslash/mailboxer,vmalloc/mailboxer,Infinidat/lanister,vmalloc/mailboxer,getslash/mailboxer,Infinidat/lanister,vmalloc/mailboxer,getslash/mailboxer
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
Add missing cascade deletes on user/roles
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
<commit_before>from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
<commit_msg>Add missing cascade deletes on user/roles<commit_after>
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
Add missing cascade deletes on user/roles
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
<commit_before>from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
<commit_msg>Add missing cascade deletes on user/roles<commit_after>from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
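A note on the cascade change above: ondelete='CASCADE' is declared on the ForeignKey columns of the association table, so the database itself removes the matching roles_users rows whenever a user or role row is deleted. A minimal sketch of that behavior under the same schema, assuming a SQLite backend (the database URL and user id are placeholders):
from sqlalchemy import create_engine, text

engine = create_engine('sqlite:///app.db')  # placeholder URL
with engine.begin() as conn:
    # SQLite only honors ON DELETE CASCADE when foreign keys are enabled.
    conn.execute(text('PRAGMA foreign_keys=ON'))
    # Deleting the user also deletes its roles_users rows; no manual cleanup.
    conn.execute(text('DELETE FROM user WHERE id = :uid'), {'uid': 1})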
5a2d0612f0d3417b07f007a87febdb045bff67e4
|
astropy/units/format/unicode_format.py
|
astropy/units/format/unicode_format.py
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.irrep().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.decompose().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
|
Update renamed method (irrep -> decompose)
|
Update renamed method (irrep -> decompose)
|
Python
|
bsd-3-clause
|
kelle/astropy,lpsinger/astropy,MSeifert04/astropy,pllim/astropy,mhvk/astropy,mhvk/astropy,tbabej/astropy,kelle/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,saimn/astropy,astropy/astropy,DougBurke/astropy,lpsinger/astropy,DougBurke/astropy,pllim/astropy,larrybradley/astropy,larrybradley/astropy,funbaker/astropy,tbabej/astropy,AustereCuriosity/astropy,joergdietrich/astropy,bsipocz/astropy,funbaker/astropy,joergdietrich/astropy,stargaser/astropy,MSeifert04/astropy,MSeifert04/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,astropy/astropy,saimn/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,astropy/astropy,tbabej/astropy,kelle/astropy,joergdietrich/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,mhvk/astropy,aleksandr-bakanov/astropy,mhvk/astropy,StuartLittlefair/astropy,pllim/astropy,bsipocz/astropy,StuartLittlefair/astropy,dhomeier/astropy,larrybradley/astropy,tbabej/astropy,pllim/astropy,bsipocz/astropy,DougBurke/astropy,stargaser/astropy,joergdietrich/astropy,kelle/astropy,saimn/astropy,saimn/astropy,funbaker/astropy,joergdietrich/astropy,DougBurke/astropy,tbabej/astropy,AustereCuriosity/astropy,bsipocz/astropy,AustereCuriosity/astropy,dhomeier/astropy,kelle/astropy,larrybradley/astropy,stargaser/astropy,astropy/astropy,mhvk/astropy,funbaker/astropy,pllim/astropy,saimn/astropy,dhomeier/astropy,stargaser/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,lpsinger/astropy,dhomeier/astropy,lpsinger/astropy,astropy/astropy
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.irrep().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
Update renamed method (irrep -> decompose)
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.decompose().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.irrep().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
<commit_msg>Update renamed method (irrep -> decompose)<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.decompose().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.irrep().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
Update renamed method (irrep -> decompose)# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.decompose().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.irrep().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
<commit_msg>Update renamed method (irrep -> decompose)<commit_after># -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import console
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> print u.Ry.decompose().to_string('unicode')
m² kg
2.18×10⁻¹⁸ ─────
s²
"""
def __init__(self):
pass
_times = "×"
_line = "─"
def _get_unit_name(self, unit):
return unit.get_format_name('unicode')
@staticmethod
def _format_superscript(number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻'}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
|
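The superscript helper in the record above maps characters one at a time through a dict; on Python 3 the same table can be handed to str.translate in a single call. A sketch under that assumption (the names here are illustrative, not astropy API):
# Same digit/minus mapping as the record, built as a translation table.
SUPERSCRIPTS = str.maketrans('0123456789-', '⁰¹²³⁴⁵⁶⁷⁸⁹⁻')

def format_superscript(number):
    return str(number).translate(SUPERSCRIPTS)

print(format_superscript(-18))  # -> ⁻¹⁸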
e2bb78a1587b7d5c0416c3632ca9674339826d55
|
src/yawf/creation.py
|
src/yawf/creation.py
|
from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params):
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
|
from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params=None):
if start_message_params is None:
start_message_params = {}
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
|
Make start_message_params optional in start_workflow()
|
Make start_message_params optional in start_workflow()
|
Python
|
mit
|
freevoid/yawf
|
from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params):
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
Make start_message_params optional in start_workflow()
|
from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params=None):
if start_message_params is None:
start_message_params = {}
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
|
<commit_before>from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params):
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
<commit_msg>Make start_message_params optional in start_workflow()<commit_after>
|
from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params=None):
if start_message_params is None:
start_message_params = {}
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
|
from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params):
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
Make start_message_params optional in start_workflow()from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params=None):
if start_message_params is None:
start_message_params = {}
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
|
<commit_before>from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params):
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
<commit_msg>Make start_message_params optional in start_workflow()<commit_after>from django.db import transaction
from yawf.config import DEFAULT_START_MESSAGE, WORKFLOW_TYPE_ATTR
from yawf import get_workflow, get_workflow_by_instance
from yawf import dispatch
from yawf.exceptions import WorkflowNotLoadedError, CreateValidationError
@transaction.commit_on_success
def create(workflow_type, sender, raw_parameters):
workflow = get_workflow(workflow_type)
if workflow is None:
raise WorkflowNotLoadedError(workflow_type)
form = workflow.create_form_cls(raw_parameters)
if form.is_valid():
instance = workflow.instance_fabric(sender, form.cleaned_data)
# Ensure that we will create, not update
instance.id = None
# Set workflow type
setattr(instance, WORKFLOW_TYPE_ATTR, workflow_type)
instance.save()
workflow.post_create_hook(sender, form.cleaned_data, instance)
return instance
else:
raise CreateValidationError(form.errors)
def start_workflow(obj, sender, start_message_params=None):
if start_message_params is None:
start_message_params = {}
workflow = get_workflow_by_instance(obj)
if isinstance(workflow.start_workflow, basestring):
return dispatch.dispatch(obj, sender, workflow.start_workflow)
elif callable(workflow.start_workflow):
start_message_id = workflow.start_workflow(obj, sender)
return dispatch.dispatch(obj, sender, start_message_id,
start_message_params)
else:
return dispatch.dispatch(obj, sender, DEFAULT_START_MESSAGE)
|
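The start_message_params=None idiom in the new version is the standard Python pattern for an optional mutable argument: a literal {} default would be evaluated once at definition time and shared by every call. A small illustration of the pitfall being avoided:
def leaky(params={}):            # one dict shared across all calls
    params['n'] = params.get('n', 0) + 1
    return params['n']

print(leaky(), leaky())          # 1 2  <- state leaks between calls

def safe(params=None):           # fresh dict per call, as in the record
    params = {} if params is None else params
    params['n'] = params.get('n', 0) + 1
    return params['n']

print(safe(), safe())            # 1 1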
f8bdd7c8139cfc6d7af4bb3d89e983073db976bf
|
mecodesktop.py
|
mecodesktop.py
|
"""
MacroecoDesktop script for making standalone executable
"""
from macroeco import desktop
desktop()
|
"""
MacroecoDesktop script for making standalone executable
"""
import sys as _sys
from macroeco import desktop
if len(_sys.argv) > 1:
desktop(_sys.argv[1])
else:
desktop()
|
Allow compiled OS X app to take parameter file as input on command line
|
Allow compiled OS X app to take parameter file as input on command line
|
Python
|
bsd-2-clause
|
jkitzes/macroeco
|
"""
MacroecoDesktop script for making standalone executable
"""
from macroeco import desktop
desktop()
Allow compiled OS X app to take parameter file as input on command line
|
"""
MacroecoDesktop script for making standalone executable
"""
import sys as _sys
from macroeco import desktop
if len(_sys.argv) > 1:
desktop(_sys.argv[1])
else:
desktop()
|
<commit_before>"""
MacroecoDesktop script for making standalone executable
"""
from macroeco import desktop
desktop()
<commit_msg>Allow compiled OS X app to take parameter file as input on command line<commit_after>
|
"""
MacroecoDesktop script for making standalone executable
"""
import sys as _sys
from macroeco import desktop
if len(_sys.argv) > 1:
desktop(_sys.argv[1])
else:
desktop()
|
"""
MacroecoDesktop script for making standalone executable
"""
from macroeco import desktop
desktop()
Allow compiled OS X app to take parameter file as input on command line"""
MacroecoDesktop script for making standalone executable
"""
import sys as _sys
from macroeco import desktop
if len(_sys.argv) > 1:
desktop(_sys.argv[1])
else:
desktop()
|
<commit_before>"""
MacroecoDesktop script for making standalone executable
"""
from macroeco import desktop
desktop()
<commit_msg>Allow compiled OS X app to take parameter file as input on command line<commit_after>"""
MacroecoDesktop script for making standalone executable
"""
import sys as _sys
from macroeco import desktop
if len(_sys.argv) > 1:
desktop(_sys.argv[1])
else:
desktop()
|
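The sys.argv test above works because argv[0] is always the program path, so a length greater than one means a parameter file was supplied. The same optional positional argument expressed with argparse, as a sketch (only the desktop import comes from the record):
import argparse
from macroeco import desktop

parser = argparse.ArgumentParser(description='MacroecoDesktop')
parser.add_argument('param_file', nargs='?', default=None,
                    help='optional path to a parameter file')
args = parser.parse_args()
if args.param_file:
    desktop(args.param_file)
else:
    desktop()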
71802047e6ee0226f7eaf27f5e3497aea4cd6164
|
testing/cloudControllerLocustTester.py
|
testing/cloudControllerLocustTester.py
|
from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1000
|
from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1500
|
Update max wait time in locust test script.
|
Update max wait time in locust test script.
|
Python
|
apache-2.0
|
IrimieBogdan/cloud-controller,IrimieBogdan/cloud-controller
|
from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1000
Update max wait time in locust test script.
|
from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1500
|
<commit_before>from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1000
<commit_msg>Update max wait time in locust test script.<commit_after>
|
from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1500
|
from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1000
Update max wait time in locust test script.from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1500
|
<commit_before>from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1000
<commit_msg>Update max wait time in locust test script.<commit_after>from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
@task
def index(self):
self.client.get("/service")
class WebsiteUser(HttpLocust):
task_set = WebsiteTasks
min_wait = 1000
max_wait = 1500
|
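For context on the record above: in the Locust API of that era, min_wait and max_wait are milliseconds, and each simulated user sleeps a random interval between them after every task, so widening 1000-1000 to 1000-1500 stops all users firing in lockstep once per second. A sketch of the sampling, assuming the uniform whole-millisecond selection those versions documented:
import random

def think_time_ms(min_wait=1000, max_wait=1500):
    # One think-time sample per completed task, in whole milliseconds.
    return random.randint(min_wait, max_wait)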
59e4e193ea41d05229f2748743e9783d68d8dabf
|
apps/__init__.py
|
apps/__init__.py
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os, importlib, logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
return importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
Handle application erroring to not break the server
|
Handle application erroring to not break the server
|
Python
|
agpl-3.0
|
sociam/indx,sociam/indx,sociam/indx
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Handle application erroring to not break the server
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os, importlib, logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
return importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
<commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Handle application erroring to not break the server<commit_after>
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os, importlib, logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
return importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Handle application erroring to not break the server## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os, importlib, logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
return importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
<commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Handle application erroring to not break the server<commit_after>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os, importlib, logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
return importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
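The module table in the record above is built with a list comprehension used only for its side effects; a plain loop states the same thing more directly. A sketch assuming the record's find_module_dirs, find_html_dirs, and import_app helpers are in scope:
MODULES = {}
_html_dirs = find_html_dirs()
for m_name in find_module_dirs():
    MODULES[m_name] = {
        'module': import_app('.'.join(['apps', m_name])),
        'html': _html_dirs.get(m_name),
    }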
5510814b2d186c6bf6d1c8af96eab16302e1675f
|
test/library/gyptest-shared-obj-install-path.py
|
test/library/gyptest-shared-obj-install-path.py
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
Add with_statement import for python2.5.
|
Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@863 78cadc50-ecff-11dd-a971-7dbc132099af
|
Python
|
bsd-3-clause
|
bnq4ever/gypgoogle,sport-monkey/GYP,chromium/gyp,trafi/gyp,xin3liang/platform_external_chromium_org_tools_gyp,pyokagan/gyp,Danath/gyp,luvit/gyp,tarc/gyp,MIPS/external-chromium_org-tools-gyp,trafi/gyp,mgamer/gyp,springmeyer/gyp,ttyangf/pdfium_gyp,trafi/gyp,duanhjlt/gyp,kevinchen3315/gyp-git,pandaxcl/gyp,turbulenz/gyp,carlTLR/gyp,MIPS/external-chromium_org-tools-gyp,cysp/gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,lianliuwei/gyp,sport-monkey/GYP,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,omasanori/gyp,svn2github/kgyp,bnoordhuis/gyp,chromium/gyp,Phuehvk/gyp,ttyangf/gyp,pyokagan/gyp,cchamberlain/gyp,bulldy80/gyp_unofficial,omasanori/gyp,omasanori/gyp,lianliuwei/gyp,mistydemeo/gyp,luvit/gyp,bdarnell/gyp,mumble-voip/libmumble-gyp,sloanyang/gyp,pyokagan/gyp,Danath/gyp,Chilledheart/gyp,mapbox/gyp,svn2github/kgyp,kevinchen3315/gyp-git,AWhetter/gyp,ttyangf/gyp,channing/gyp,svn2github/gyp,sanyaade-teachings/gyp,Omegaphora/external_chromium_org_tools_gyp,amoikevin/gyp,yangrongwei/gyp,duanhjlt/gyp,springmeyer/gyp,sdklite/gyp,tarc/gyp,mgamer/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,msc-/gyp,turbulenz/gyp,duanhjlt/gyp,yinquan529/platform-external-chromium_org-tools-gyp,enkripsi/gyp,adblockplus/gyp,mapbox/gyp,Jack-Q/GYP-copy,svn2github/kgyp,ttyangf/pdfium_gyp,mgamer/gyp,bpsinc-native/src_tools_gyp,erikge/watch_gyp,pandaxcl/gyp,bnoordhuis/gyp,chromium/gyp,ryfx/gyp,cchamberlain/gyp,sdklite/gyp,alexcrichton/gyp,svn2github/gyp,erikge/watch_gyp,adblockplus/gyp,lianliuwei/gyp,msc-/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,svn2github/kgyp,bdarnell/gyp,mapbox/gyp,saghul/gyn,adblockplus/gyp,Chilledheart/gyp,azunite/gyp,yjhjstz/gyp,Phuehvk/gyp,AWhetter/gyp,bnoordhuis/gyp,sanyaade-teachings/gyp,azunite/gyp,duanhjlt/gyp,lukeweber/gyp-override,clar/gyp,lukeweber/gyp-override,trafi/gyp,mistydemeo/gyp,openpeer/webrtc-gyp,bnq4ever/gypgoogle,omasanori/gyp,lukeweber/gyp-override,Omegaphora/external_chromium_org_tools_gyp,adblockplus/gyp,azunite/gyp_20150930,chromium/gyp,mgamer/gyp,azunite/gyp_20150930,Jack-Q/GYP-copy,brson/gyp,turbulenz/gyp,okwasi/gyp,azunite/gyp,saghul/gyn,kevinchen3315/gyp-git,dougbeal/gyp,channing/gyp,carlTLR/gyp,dougbeal/gyp,kevinchen3315/gyp-git,brson/gyp,mumble-voip/libmumble-gyp,alexcrichton/gyp,xin3liang/platform_external_chromium_org_tools_gyp,pandaxcl/gyp,ryfx/gyp,clar/gyp,AWhetter/gyp,brson/gyp,ryfx/gyp,channing/gyp,sdklite/gyp,svn2github/gyp,cchamberlain/gyp,okumura/gyp,bpsinc-native/src_tools_gyp,Phuehvk/gyp,openpeer/webrtc-gyp,cchamberlain/gyp,azunite/gyp,sanyaade-teachings/gyp,turbulenz/gyp,ttyangf/gyp,bdarnell/gyp,alexcrichton/gyp,bdarnell/gyp,pyokagan/gyp,azunite/gyp_20150930,mkrautz/gyp-libmumble,alexcrichton/gyp,azunite/gyp_20150930,mapbox/gyp,erikge/watch_gyp,saghul/gyn,tarc/gyp,xin3liang/platform_external_chromium_org_tools_gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,amoikevin/gyp,mumble-voip/libmumble-gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,AOSPU/external_chromium_org_tools_gyp,dougbeal/gyp,pyokagan/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,brson/gyp,azunite/gyp_20150930,bnoordhuis/gyp,Phuehvk/gyp,yinquan529/platform-external-chromium_org-tools-gyp,mkrautz/gyp-libmumble,erikge/watch_gyp,xin3liang/platform_external_chromium_org_tools_gyp,mumble-voip/libmumble-gyp,sdklite/gyp,Chilledheart/gyp,lukeweber/gyp-override,carlTLR/gyp,lianliuwei/gyp,MIPS/external-chromium_org-tools-gyp,clar/gyp,tarc/gyp,Chilledheart/gyp,yan
grongwei/gyp,msc-/gyp,channing/gyp,yjhjstz/gyp,bnq4ever/gypgoogle,springmeyer/gyp,duanhjlt/gyp,mkrautz/gyp-libmumble,yjhjstz/gyp,ttyangf/pdfium_gyp,turbulenz/gyp,android-ia/platform_external_chromium_org_tools_gyp,openpeer/webrtc-gyp,mgamer/gyp,enkripsi/gyp,Jack-Q/GYP-copy,bulldy80/gyp_unofficial,Danath/gyp,Omegaphora/external_chromium_org_tools_gyp,openpeer/webrtc-gyp,bpsinc-native/src_tools_gyp,yangrongwei/gyp,MIPS/external-chromium_org-tools-gyp,luvit/gyp,bnq4ever/gypgoogle,msc-/gyp,adblockplus/gyp,AOSPU/external_chromium_org_tools_gyp,AWhetter/gyp,tarc/gyp,amoikevin/gyp,carlTLR/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,sloanyang/gyp,ttyangf/gyp,dougbeal/gyp,LazyCodingCat/gyp,yjhjstz/gyp,Phuehvk/gyp,saghul/gyn,bnoordhuis/gyp,enkripsi/gyp,cchamberlain/gyp,svn2github/kgyp,Jack-Q/GYP-copy,mistydemeo/gyp,enkripsi/gyp,ttyangf/pdfium_gyp,dougbeal/gyp,Danath/gyp,sdklite/gyp,yjhjstz/gyp,android-ia/platform_external_chromium_org_tools_gyp,svn2github/gyp,pandaxcl/gyp,msc-/gyp,sport-monkey/GYP,mistydemeo/gyp,LazyCodingCat/gyp,springmeyer/gyp,okwasi/gyp,cysp/gyp,android-ia/platform_external_chromium_org_tools_gyp,bpsinc-native/src_tools_gyp,bulldy80/gyp_unofficial,Chilledheart/gyp,trafi/gyp,amoikevin/gyp,cysp/gyp,saghul/gyn,Jack-Q/GYP-copy,springmeyer/gyp,yangrongwei/gyp,yinquan529/platform-external-chromium_org-tools-gyp,amoikevin/gyp,ttyangf/pdfium_gyp,sport-monkey/GYP,okwasi/gyp,okumura/gyp,Danath/gyp,sloanyang/gyp,yinquan529/platform-external-chromium_org-tools-gyp,Omegaphora/external_chromium_org_tools_gyp,svn2github/gyp,clar/gyp,sport-monkey/GYP,AOSPU/external_chromium_org_tools_gyp,okwasi/gyp,luvit/gyp,LazyCodingCat/gyp,mapbox/gyp,carlTLR/gyp,okumura/gyp,ttyangf/gyp,bulldy80/gyp_unofficial,okumura/gyp,LazyCodingCat/gyp,sanyaade-teachings/gyp,ryfx/gyp,AWhetter/gyp,bulldy80/gyp_unofficial,chromium/gyp,LazyCodingCat/gyp,openpeer/webrtc-gyp,AOSPU/external_chromium_org_tools_gyp,ryfx/gyp,mkrautz/gyp-libmumble,azunite/gyp,pandaxcl/gyp,sanyaade-teachings/gyp,cysp/gyp,bnq4ever/gypgoogle,erikge/watch_gyp,clar/gyp,cysp/gyp,android-ia/platform_external_chromium_org_tools_gyp,sloanyang/gyp,enkripsi/gyp
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@863 78cadc50-ecff-11dd-a971-7dbc132099af
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
<commit_msg>Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@863 78cadc50-ecff-11dd-a971-7dbc132099af<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@863 78cadc50-ecff-11dd-a971-7dbc132099af#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
<commit_msg>Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@863 78cadc50-ecff-11dd-a971-7dbc132099af<commit_after>#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
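The __future__ line added above is needed because the with statement only became a default keyword in Python 2.6; under 2.5 the module fails to compile without it (PEP 343). The smallest reproduction:
# Parses on Python 2.5 only because the __future__ import comes first.
from __future__ import with_statement

with open('Makefile') as f:
    contents = f.read()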
0404f6ebc33d83fc6dfeceed5d9370e73ef40e64
|
awx/main/conf.py
|
awx/main/conf.py
|
# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
|
# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
# TODO: Caching so we don't have to hit the database every time for settings
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
|
Add a note about caching
|
Add a note about caching
|
Python
|
apache-2.0
|
snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx
|
# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
Add a note about caching
|
# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
# TODO: Caching so we don't have to hit the database every time for settings
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
|
<commit_before># Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
<commit_msg>Add a note about caching<commit_after>
|
# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
# TODO: Caching so we don't have to hit the database every time for settings
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
|
# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
Add a note about caching# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
# TODO: Caching so we don't have to hit the database every time for settings
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
|
<commit_before># Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
<commit_msg>Add a note about caching<commit_after># Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
# TODO: Caching so we don't have to hit the database every time for settings
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(self, key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
settings_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual = settings_actual[0]
settings_actual.value = value
settings_actual.save()
tower_settings = TowerConfiguration()
|
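The TODO added in the record above only notes that caching is wanted; one way it could be done, sketched here as a generic read-through cache rather than the actual AWX models (the CachedSettings name and the backend object are assumptions, not AWX API):

import time

class CachedSettings(object):
    """Minimal read-through cache sketch; not the real TowerConfiguration."""
    def __init__(self, backend, ttl=30):
        self._backend = backend  # any object exposing get(key)
        self._ttl = ttl          # seconds before a cached value goes stale
        self._cache = {}         # key -> (expires_at, value)

    def get(self, key):
        hit = self._cache.get(key)
        if hit is not None and hit[0] > time.time():
            return hit[1]        # cache hit, skip the database
        value = self._backend.get(key)
        self._cache[key] = (time.time() + self._ttl, value)
        return value

A time-based expiry keeps stale settings bounded without needing cache invalidation hooks in the save path.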
19c4d0035c0e64425adb4aee34a9e364172e529c
|
gitcommitautosave.py
|
gitcommitautosave.py
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
if path and any(path.endswith(name) for name in git_files):
return True
|
Add support for interactive rebase
|
Add support for interactive rebase
|
Python
|
mit
|
aristidesfl/sublime-git-commit-message-auto-save
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
Add support for interactive rebase
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
if path and any(path.endswith(name) for name in git_files):
return True
|
<commit_before>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
<commit_msg>Add support for interactive rebase<commit_after>
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
if path and any(path.endswith(name) for name in git_files):
return True
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
Add support for interactive rebase"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
if path and any(path.endswith(name) for name in git_files):
return True
|
<commit_before>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
<commit_msg>Add support for interactive rebase<commit_after>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
if path and any(path.endswith(name) for name in git_files):
return True
|
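The refactor above centralizes the file check in is_git_file; to cover the other files git opens for editing, a sketch could match the exact basename (the MERGE_MSG and TAG_EDITMSG entries are illustrative additions, not part of the plugin):

import os

GIT_EDIT_FILES = frozenset(
    ('COMMIT_EDITMSG', 'MERGE_MSG', 'TAG_EDITMSG', 'git-rebase-todo'))

def is_git_file(path):
    """Return True when *path* is a file git opens for user editing."""
    return bool(path) and os.path.basename(path) in GIT_EDIT_FILES

Matching the basename exactly is stricter than endswith, which would also accept names like 'notCOMMIT_EDITMSG'.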
9416261fefeb37ad89509e54975bcba02069183b
|
pywikibot/__metadata__.py
|
pywikibot/__metadata__.py
|
# -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.0.1.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
|
# -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.1.0.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
|
Update pwb version to 4.1
|
[4.1] Update pwb version to 4.1
The current development is more than just bugfixes and i18n/L10N updates
Change-Id: I581bfa1ee49f91161d904227e1be338db8361819
|
Python
|
mit
|
wikimedia/pywikibot-core,wikimedia/pywikibot-core
|
# -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.0.1.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
[4.1] Update pwb version to 4.1
The current development is more than just bugfixes and i18n/L10N updates
Change-Id: I581bfa1ee49f91161d904227e1be338db8361819
|
# -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.1.0.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
|
<commit_before># -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.0.1.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
<commit_msg>[4.1] Update pwb version to 4.1
The current development is more than just bugfixes and i18n/L10N updates
Change-Id: I581bfa1ee49f91161d904227e1be338db8361819<commit_after>
|
# -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.1.0.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
|
# -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.0.1.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
[4.1] Update pwb version to 4.1
The current development is more than just bugfixes and i18n/L10N updates
Change-Id: I581bfa1ee49f91161d904227e1be338db8361819# -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.1.0.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
|
<commit_before># -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.0.1.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
<commit_msg>[4.1] Update pwb version to 4.1
The current development is more than just bugfixes and i18n/L10N updates
Change-Id: I581bfa1ee49f91161d904227e1be338db8361819<commit_after># -*- coding: utf-8 -*-
"""Pywikibot metadata file."""
#
# (C) Pywikibot team, 2020
#
# Distributed under the terms of the MIT license.
#
__name__ = 'pywikibot'
__version__ = '4.1.0.dev0'
__description__ = 'Python MediaWiki Bot Framework'
__maintainer__ = 'The Pywikibot team'
__maintainer_email__ = 'pywikibot@lists.wikimedia.org'
__license__ = 'MIT License'
__url__ = 'https://www.mediawiki.org/wiki/Manual:Pywikibot'
__download_url__ = 'https://pywikibot.toolforge.org/'
__copyright__ = '(C) Pywikibot team, 2003-2020'
__keywords__ = 'API bot client framework mediawiki pwb python pywiki ' \
'pywikibase pywikibot pywikipedia pywikipediabot wiki ' \
'wikibase wikidata wikimedia wikipedia'
|
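The bump above follows the usual convention that new features raise the minor component while the dev suffix resets; a small helper showing that arithmetic (illustrative only, not part of Pywikibot):

def bump_minor(version):
    """Turn 'X.Y.Z.devN' into 'X.(Y+1).0.dev0' (sketch only)."""
    major, minor = (int(p) for p in version.split('.')[:2])
    return '{0}.{1}.0.dev0'.format(major, minor + 1)

assert bump_minor('4.0.1.dev0') == '4.1.0.dev0'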
55c183ad234ec53e2c7ba82e9e19793564373200
|
comics/comics/dieselsweetiesweb.py
|
comics/comics/dieselsweetiesweb.py
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not entry.summary:
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not hasattr(entry, 'summary'):
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
|
Check if field exists, not if it's empty
|
Check if field exists, not if it's empty
|
Python
|
agpl-3.0
|
jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not entry.summary:
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
Check if field exists, not if it's empty
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not hasattr(entry, 'summary'):
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
|
<commit_before>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not entry.summary:
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
<commit_msg>Check if field exists, not if it's empty<commit_after>
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not hasattr(entry, 'summary'):
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not entry.summary:
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
Check if field exists, not if it's emptyfrom comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not hasattr(entry, 'summary'):
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
|
<commit_before>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not entry.summary:
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
<commit_msg>Check if field exists, not if it's empty<commit_after>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not hasattr(entry, 'summary'):
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
|
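The one-line change above matters because feed entries are objects whose fields may be absent entirely: a truthiness test on a missing attribute raises AttributeError, while hasattr distinguishes "missing" from "present but empty". A self-contained illustration with a plain object standing in for a feedparser entry:

class Entry(object):
    """Stand-in for a feed entry; real feedparser entries behave similarly."""
    pass

entry = Entry()

try:
    empty = not entry.summary        # truthiness check: blows up when missing
except AttributeError:
    empty = True

assert not hasattr(entry, 'summary')  # existence check: safe when missing
entry.summary = ''
assert hasattr(entry, 'summary') and not entry.summary  # present but empty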
15838f52f7f0cc40bcec8f64ad59dffe6bd945a5
|
hatarake/__init__.py
|
hatarake/__init__.py
|
import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
|
import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'history.db'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'history.db',
)
|
Add path for sqlite file
|
Add path for sqlite file
|
Python
|
mit
|
kfdm/hatarake
|
import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
Add path for sqlite file
|
import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'history.db'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'history.db',
)
|
<commit_before>import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
<commit_msg>Add path for sqlite file<commit_after>
|
import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'history.db'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'history.db',
)
|
import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
Add path for sqlite fileimport os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'history.db'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'history.db',
)
|
<commit_before>import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
<commit_msg>Add path for sqlite file<commit_after>import os
import platform
from hatarake.version import __version__
ISSUES_LINK = 'https://github.com/kfdm/hatarake/issues'
ISSUES_API = 'https://api.github.com/repos/kfdm/hatarake/issues?state=open'
USER_AGENT = 'Hatarake/%s https://github.com/kfdm/hatarake' % __version__
GROWL_INTERVAL = 30
if 'Darwin' in platform.uname():
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'config.ini'
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'Library',
'Application Support',
'Hatarake',
'history.db'
)
else:
CONFIG_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'config.ini',
)
DB_PATH = os.path.join(
os.path.expanduser("~"),
'.config',
'Hatarake',
'history.db',
)
|
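The record above duplicates the home-relative path logic for each platform branch; a sketch that derives both paths from one helper (the function name and the use of platform.system() are choices made here, not Hatarake code, and XDG environment overrides are ignored):

import os
import platform

def support_dir(app='Hatarake'):
    """Per-user data directory for *app*; sketch, ignores $XDG_CONFIG_HOME."""
    home = os.path.expanduser('~')
    if platform.system() == 'Darwin':
        return os.path.join(home, 'Library', 'Application Support', app)
    return os.path.join(home, '.config', app)

CONFIG_PATH = os.path.join(support_dir(), 'config.ini')
DB_PATH = os.path.join(support_dir(), 'history.db')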
308390bdd15c9a4abc79b567577b160c8e4adfab
|
examples/demo/setup.py
|
examples/demo/setup.py
|
from setuptools import setup
setup(name='demo',
install_requires=[
'wsme',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
|
from setuptools import setup
setup(name='demo',
install_requires=[
'WSME',
'WSME-Soap',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
|
Add a dependency on WSME-Soap
|
Add a dependency on WSME-Soap
|
Python
|
mit
|
stackforge/wsme
|
from setuptools import setup
setup(name='demo',
install_requires=[
'wsme',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
Add a dependency on WSME-Soap
|
from setuptools import setup
setup(name='demo',
install_requires=[
'WSME',
'WSME-Soap',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
|
<commit_before>from setuptools import setup
setup(name='demo',
install_requires=[
'wsme',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
<commit_msg>Add a dependency on WSME-Soap<commit_after>
|
from setuptools import setup
setup(name='demo',
install_requires=[
'WSME',
'WSME-Soap',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
|
from setuptools import setup
setup(name='demo',
install_requires=[
'wsme',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
Add a dependency on WSME-Soapfrom setuptools import setup
setup(name='demo',
install_requires=[
'WSME',
'WSME-Soap',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
|
<commit_before>from setuptools import setup
setup(name='demo',
install_requires=[
'wsme',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
<commit_msg>Add a dependency on WSME-Soap<commit_after>from setuptools import setup
setup(name='demo',
install_requires=[
'WSME',
'WSME-Soap',
'PasteScript',
'PasteDeploy',
'WSGIUtils',
'Pygments',
],
packages=['demo'])
|
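For reference, a minimal corrected form of the demo's setup script; the keyword setuptools understands is packages (plural), and a misspelled package only triggers an "Unknown distribution option" warning while the package directory silently never gets installed:

from setuptools import setup

setup(
    name='demo',
    packages=['demo'],      # 'packages', plural; 'package' is not an option
    install_requires=[
        'WSME',
        'WSME-Soap',        # soap protocol support lives in its own dist
    ],
)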
9ca8b4bddabe8bdf91019d0bbc9a792feacbaff9
|
config.py
|
config.py
|
# Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH+"logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH+"logs/"
LOG_LEVEL = logging.ERROR
# RRD config
RRD_STEP = "300"
RRD_STORE_PATH = BASE_PATH+"rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "600"
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
|
# Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH + "logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH + "logs/"
LOG_LEVEL = logging.ERROR
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
# RRD config
RRD_STEP = str(REQUEST_INTERVAL)
RRD_STORE_PATH = BASE_PATH + "rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "60"
|
Set RRD STEP equal to REQUEST_INTERVAL.
|
Set RRD STEP equal to REQUEST_INTERVAL.
|
Python
|
mit
|
StefanoSalsano/OSHI-monitoring,StefanoSalsano/OSHI-monitoring,netgroup/OSHI-monitoring,ferrarimarco/OSHI-monitoring,ferrarimarco/OSHI-monitoring,netgroup/OSHI-monitoring
|
# Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH+"logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH+"logs/"
LOG_LEVEL = logging.ERROR
# RRD config
RRD_STEP = "300"
RRD_STORE_PATH = BASE_PATH+"rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "600"
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
Set RRD STEP equal to REQUEST_INTERVAL.
|
# Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH + "logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH + "logs/"
LOG_LEVEL = logging.ERROR
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
# RRD config
RRD_STEP = str(REQUEST_INTERVAL)
RRD_STORE_PATH = BASE_PATH + "rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "60"
|
<commit_before># Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH+"logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH+"logs/"
LOG_LEVEL = logging.ERROR
# RRD config
RRD_STEP = "300"
RRD_STORE_PATH = BASE_PATH+"rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "600"
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
<commit_msg>Set RRD STEP equal to REQUEST_INTERVAL.<commit_after>
|
# Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH + "logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH + "logs/"
LOG_LEVEL = logging.ERROR
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
# RRD config
RRD_STEP = str(REQUEST_INTERVAL)
RRD_STORE_PATH = BASE_PATH + "rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "60"
|
# Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH+"logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH+"logs/"
LOG_LEVEL = logging.ERROR
# RRD config
RRD_STEP = "300"
RRD_STORE_PATH = BASE_PATH+"rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "600"
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
Set RRD STEP equal to REQUEST_INTERVAL.# Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH + "logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH + "logs/"
LOG_LEVEL = logging.ERROR
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
# RRD config
RRD_STEP = str(REQUEST_INTERVAL)
RRD_STORE_PATH = BASE_PATH + "rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "60"
|
<commit_before># Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH+"logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH+"logs/"
LOG_LEVEL = logging.ERROR
# RRD config
RRD_STEP = "300"
RRD_STORE_PATH = BASE_PATH+"rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "600"
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
<commit_msg>Set RRD STEP equal to REQUEST_INTERVAL.<commit_after># Log config
import logging
BASE_PATH = "/home/user/workspace/OSHI-monitoring/"
RRD_LOG_PATH = BASE_PATH + "logs/"
TRAFFIC_MONITOR_LOG_PATH = BASE_PATH + "logs/"
LOG_LEVEL = logging.ERROR
# Traffic monitor config
REQUEST_INTERVAL = 30
LLDP_NOISE_BYTE_S = 19
LLDP_NOISE_PACK_S = 0.365
# RRD config
RRD_STEP = str(REQUEST_INTERVAL)
RRD_STORE_PATH = BASE_PATH + "rrd/"
RRD_DATA_SOURCE_TYPE = "GAUGE"
RRD_DATA_SOURCE_HEARTBEAT = "60"
|
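Tying RRD_STEP to REQUEST_INTERVAL as above keeps the archive step aligned with the polling rate; the heartbeat can be derived the same way using the common rrdtool rule of thumb of twice the step (a sketch mirroring the record's values):

REQUEST_INTERVAL = 30                     # seconds between stats requests
RRD_STEP = str(REQUEST_INTERVAL)          # one RRD step per polling cycle
# rrdtool stores UNKNOWN when no update arrives within the heartbeat,
# so give each data source two polling cycles before declaring data lost.
RRD_DATA_SOURCE_HEARTBEAT = str(2 * REQUEST_INTERVAL)

assert RRD_DATA_SOURCE_HEARTBEAT == "60"  # matches the record above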
9a7654c727a24eecadc26ac400408cb4837ec0cc
|
pywayland/protocol/__init__.py
|
pywayland/protocol/__init__.py
|
# Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
# Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import sys
# there is no IntFlag in python 3.5, which is used in bitfields
# without a proper flag type, these enums will be less usable; if we need to
# support this, we can pull in some backport of the necessary functionality
if sys.version_info < (3, 6):
enum.IntFlag = enum.IntEnum
|
Fix intflag on Python 3.5
|
Fix intflag on Python 3.5
Just define it to IntEnum, which is definitely not going to be correct,
but this should fix the tests on Python 3.5
|
Python
|
apache-2.0
|
flacjacket/pywayland
|
# Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Fix intflag on Python 3.5
Just define it to IntEnum, which is definitely not going to be correct,
but this should fix the tests on Python 3.5
|
# Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import sys
# there is no IntFlag in python 3.5, which is used in bitfields
# without a proper flag type, these enums will be less usable; if we need to
# support this, we can pull in some backport of the necessary functionality
if sys.version_info < (3, 6):
enum.IntFlag = enum.IntEnum
|
<commit_before># Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<commit_msg>Fix intflag on Python 3.5
Just define it to IntEnum, which is definitely not going to be correct,
but this should fix the tests on Python 3.5<commit_after>
|
# Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import sys
# there is no IntFlag in python 3.5, which is used in bitfields
# without a proper flag type, these enums will be less usable; if we need to
# support this, we can pull in some backport of the necessary functionality
if sys.version_info < (3, 6):
enum.IntFlag = enum.IntEnum
|
# Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Fix intflag on Python 3.5
Just define it to IntEnum, which is definitely not going to be correct,
but this should fix the tests on Python 3.5# Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import sys
# there is no IntFlag in python 3.5, which is used in bitfields
# without a proper flag type, these enums will be less usable; if we need to
# support this, we can pull in some backport of the necessary functionality
if sys.version_info < (3, 6):
enum.IntFlag = enum.IntEnum
|
<commit_before># Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<commit_msg>Fix intflag on Python 3.5
Just define it to IntEnum, which is definitely not going to be correct,
but this should fix the tests on Python 3.5<commit_after># Copyright 2015 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import sys
# there is no IntFlag in python 3.5, which is used in bitfields
# without a proper flag type, these enums will be less usable; if we need to
# support this, we can pull in some backport of the necessary functionality
if sys.version_info < (3, 6):
enum.IntFlag = enum.IntEnum
|
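Rather than comparing interpreter versions, the same shim can feature-detect the attribute; note that falling back to IntEnum keeps the names importable but bitwise combinations decay to plain ints, which is exactly the usability loss the record's comment warns about (a sketch, not the pywayland source):

import enum

# Use the real IntFlag when present, otherwise degrade to IntEnum.
IntFlag = getattr(enum, 'IntFlag', enum.IntEnum)

class Capability(IntFlag):
    POINTER = 1
    KEYBOARD = 2
    TOUCH = 4

combined = Capability.POINTER | Capability.KEYBOARD
assert int(combined) == 3  # a Capability member under IntFlag, a bare int on 3.5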
131cb9abd711cc71c558e5a89d5e2b8a28ae8517
|
tests/integration/test_gists.py
|
tests/integration/test_gists.py
|
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
# -*- coding: utf-8 -*-
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
Add docstrings to Gist integration tests
|
Add docstrings to Gist integration tests
@esacteksab would be so proud
|
Python
|
bsd-3-clause
|
krxsky/github3.py,balloob/github3.py,jim-minter/github3.py,ueg1990/github3.py,wbrefvem/github3.py,agamdua/github3.py,christophelec/github3.py,icio/github3.py,sigmavirus24/github3.py,itsmemattchung/github3.py,h4ck3rm1k3/github3.py,degustaf/github3.py
|
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
Add docstrings to Gist integration tests
@esacteksab would be so proud
|
# -*- coding: utf-8 -*-
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
<commit_before>from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
<commit_msg>Add docstrings to Gist integration tests
@esacteksab would be so proud<commit_after>
|
# -*- coding: utf-8 -*-
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
Add docstrings to Gist integration tests
@esacteksab would be so proud# -*- coding: utf-8 -*-
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
<commit_before>from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
<commit_msg>Add docstrings to Gist integration tests
@esacteksab would be so proud<commit_after># -*- coding: utf-8 -*-
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
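The record above also illustrates the recorded-HTTP pattern these integration tests share: name a betamax cassette, replay the stored traffic inside recorder.use_cassette, then assert on the returned objects as if they came from the live API. A minimal sketch of one more test in the same style — the cassette name and the is_starred assertion are assumptions for illustration, not part of the record:

import github3
from .helper import IntegrationHelper


class TestGistSketch(IntegrationHelper):

    """Illustrative Gist integration test in the same recorded style."""

    def test_is_starred(self):
        """Show that a user can check whether a gist is starred."""
        cassette_name = self.cassette_name('is_starred')
        with self.recorder.use_cassette(cassette_name):
            gist = self.gh.gist(3342247)
            assert gist is not None
            # The replayed cassette drives this boolean; no network needed.
            assert gist.is_starred() in (True, False)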
02550f389a6b3208d86e7a92f01c9e1df42561f7
|
sahara/tests/unit/testutils.py
|
sahara/tests/unit/testutils.py
|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=[], **kwargs):
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=None, **kwargs):
instances = instances or []
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
|
Use immutable arg rather mutable arg
|
Use immutable arg rather mutable arg
Passing mutable objects as default args is a known Python pitfall.
We'd better avoid this. This commit changes mutable default args with
None, then use 'arg = arg or []'.
Change-Id: If3a10d58e6cd792a2011c177c49d3b865a7421ff
|
Python
|
apache-2.0
|
henaras/sahara,ekasitk/sahara,esikachev/scenario,redhat-openstack/sahara,ekasitk/sahara,tellesnobrega/sahara,tellesnobrega/sahara,keedio/sahara,citrix-openstack-build/sahara,mapr/sahara,ekasitk/sahara,matips/iosr-2015,bigfootproject/sahara,mapr/sahara,zhujzhuo/Sahara,esikachev/sahara-backup,zhangjunli177/sahara,bigfootproject/sahara,henaras/sahara,esikachev/sahara-backup,egafford/sahara,openstack/sahara,bigfootproject/sahara,crobby/sahara,egafford/sahara,crobby/sahara,mapr/sahara,henaras/sahara,matips/iosr-2015,redhat-openstack/sahara,citrix-openstack-build/sahara,zhangjunli177/sahara,openstack/sahara,citrix-openstack-build/sahara,zhangjunli177/sahara,tellesnobrega/storm_plugin,keedio/sahara,keedio/sahara,xme1226/sahara,xme1226/sahara,esikachev/sahara-backup,tellesnobrega/storm_plugin,redhat-openstack/sahara,xme1226/sahara,esikachev/scenario,zhujzhuo/Sahara,zhujzhuo/Sahara,crobby/sahara,esikachev/scenario,tellesnobrega/storm_plugin,matips/iosr-2015
|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=[], **kwargs):
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
Use immutable arg rather mutable arg
Passing mutable objects as default args is a known Python pitfall.
We'd better avoid this. This commit changes mutable default args with
None, then use 'arg = arg or []'.
Change-Id: If3a10d58e6cd792a2011c177c49d3b865a7421ff
|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=None, **kwargs):
instances = instances or []
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
|
<commit_before># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=[], **kwargs):
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
<commit_msg>Use immutable arg rather mutable arg
Passing mutable objects as default args is a known Python pitfall.
We'd better avoid this. This commit changes mutable default args with
None, then use 'arg = arg or []'.
Change-Id: If3a10d58e6cd792a2011c177c49d3b865a7421ff<commit_after>
|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=None, **kwargs):
instances = instances or []
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=[], **kwargs):
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
Use immutable arg rather mutable arg
Passing mutable objects as default args is a known Python pitfall.
We'd better avoid this. This commit changes mutable default args with
None, then use 'arg = arg or []'.
Change-Id: If3a10d58e6cd792a2011c177c49d3b865a7421ff# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=None, **kwargs):
instances = instances or []
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
|
<commit_before># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=[], **kwargs):
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
<commit_msg>Use immutable arg rather mutable arg
Passing mutable objects as default args is a known Python pitfall.
We'd better avoid this. This commit changes mutable default args with
None, then use 'arg = arg or []'.
Change-Id: If3a10d58e6cd792a2011c177c49d3b865a7421ff<commit_after># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=None, **kwargs):
instances = instances or []
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
|
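The pitfall behind this record is easy to reproduce in isolation: a mutable default is evaluated once, when the def statement runs, so every call that omits the argument mutates the same shared list. A self-contained sketch of the bug and of the None-plus-rebind fix the commit applies to make_ng_dict:

def buggy_append(item, bucket=[]):
    # The [] above is created once, at definition time, and shared.
    bucket.append(item)
    return bucket

print(buggy_append(1))  # [1]
print(buggy_append(2))  # [1, 2] -- state leaked from the first call


def fixed_append(item, bucket=None):
    # Same idiom as the commit: default to None, rebind in the body.
    bucket = bucket or []
    bucket.append(item)
    return bucket

print(fixed_append(1))  # [1]
print(fixed_append(2))  # [2]

One subtlety of the `bucket or []` spelling: an explicitly passed empty list is also replaced by a fresh one, so callers cannot collect results into a list they supplied; `if bucket is None: bucket = []` is the stricter variant when that matters.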
19fb0aa1daceed336c7f452ed361ad79107e75a2
|
server/src/voodoo/sessions/sqlalchemy_data.py
|
server/src/voodoo/sessions/sqlalchemy_data.py
|
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, Binary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(Binary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
|
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, LargeBinary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(LargeBinary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
|
Remove sqlalchemy 0.7 warning (Binary => LargeBinary)
|
Remove sqlalchemy 0.7 warning (Binary => LargeBinary)
|
Python
|
bsd-2-clause
|
morelab/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto
|
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, Binary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(Binary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
Remove sqlalchemy 0.7 warning (Binary => LargeBinary)
|
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, LargeBinary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(LargeBinary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
|
<commit_before>#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, Binary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(Binary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
<commit_msg>Remove sqlalchemy 0.7 warning (Binary => LargeBinary)<commit_after>
|
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, LargeBinary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(LargeBinary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
|
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, Binary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(Binary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
Remove sqlalchemy 0.7 warning (Binary => LargeBinary)#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, LargeBinary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(LargeBinary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
|
<commit_before>#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, Binary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(Binary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
<commit_msg>Remove sqlalchemy 0.7 warning (Binary => LargeBinary)<commit_after>#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, LargeBinary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(LargeBinary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
|
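For context, Binary became a deprecated alias for LargeBinary; it was already emitting warnings under SQLAlchemy 0.7 (as the commit notes) and was dropped from the public API in later releases, so the rename is the complete fix. A standalone sketch of a column declared the warning-free way, using the same declarative style as the record:

from sqlalchemy import Column, Integer, LargeBinary
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Blob(Base):
    __tablename__ = 'blobs'

    id = Column(Integer, primary_key=True)
    # LargeBinary maps to BLOB/BYTEA-style types, exactly as Binary did.
    payload = Column(LargeBinary(), nullable=False)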
9d184e1d323078d7ce73300ba90f6711c6e8f4c1
|
oauth_access/models.py
|
oauth_access/models.py
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return datetime.datetime.now() < self.expires
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
|
Check if an association has an expiry time before deciding if it's expired
|
Check if an association has an expiry time before deciding if it's expired
|
Python
|
bsd-3-clause
|
eldarion/django-oauth-access,eldarion/django-oauth-access
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return datetime.datetime.now() < self.expires
Check if an association has an expiry time before deciding if it's expired
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
|
<commit_before>import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return datetime.datetime.now() < self.expires
<commit_msg>Check if an association has an expiry time before deciding if it's expired<commit_after>
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
|
import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return datetime.datetime.now() < self.expires
Check if an association has an expiry time before deciding if it's expiredimport datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
|
<commit_before>import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return datetime.datetime.now() < self.expires
<commit_msg>Check if an association has an expiry time before deciding if it's expired<commit_after>import datetime
from django.db import models
from django.contrib.auth.models import User
class UserAssociation(models.Model):
user = models.ForeignKey(User)
service = models.CharField(max_length=75, db_index=True)
identifier = models.CharField(max_length=255, db_index=True)
token = models.CharField(max_length=200)
expires = models.DateTimeField(null=True)
class Meta:
unique_together = [("user", "service")]
def expired(self):
return self.expires and datetime.datetime.now() < self.expires
|
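The guard in this record relies on short-circuit evaluation: when expires is NULL, `self.expires and ...` returns the falsy None before the datetime comparison runs, which would otherwise raise TypeError when a datetime is compared to None. A standalone sketch of the behaviour (the class and values are illustrative only):

import datetime


class Assoc(object):
    def __init__(self, expires=None):
        self.expires = expires

    def expired(self):
        # Same expression as the record: falsy when no expiry is set.
        return self.expires and datetime.datetime.now() < self.expires


print(bool(Assoc().expired()))                       # False: no expiry stored
print(bool(Assoc(datetime.datetime.max).expired()))  # True while now < expires

Read literally, the preserved comparison makes expired() truthy while the expiry still lies in the future, which looks inverted relative to the method name; the commit only adds the None guard and leaves that comparison untouched.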