Dataset schema (column, feature type, observed length range or class count):

    commit           stringlengths   40 - 40
    old_file         stringlengths   4 - 118
    new_file         stringlengths   4 - 118
    old_contents     stringlengths   0 - 2.94k
    new_contents     stringlengths   1 - 4.43k
    subject          stringlengths   15 - 444
    message          stringlengths   16 - 3.45k
    lang             stringclasses   1 value
    license          stringclasses   13 values
    repos            stringlengths   5 - 43.2k
    prompt           stringlengths   17 - 4.58k
    response         stringlengths   1 - 4.43k
    prompt_tagged    stringlengths   58 - 4.62k
    response_tagged  stringlengths   1 - 4.43k
    text             stringlengths   132 - 7.29k
    text_tagged      stringlengths   173 - 7.33k
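
A minimal sketch of loading and inspecting a dataset with this schema, assuming it is published on the Hugging Face Hub; the dataset path below is a hypothetical placeholder, not the real repository name:

    from datasets import load_dataset  # pip install datasets

    # Hypothetical path -- substitute the actual dataset repository.
    ds = load_dataset("someuser/python-commit-rewrites", split="train")

    row = ds[0]
    print(row["commit"])              # 40-character commit hash
    print(row["old_file"])            # path of the file before the commit
    print(row["subject"])             # one-line commit subject
    print(row["new_contents"][:200])  # start of the post-commit file contents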

commit: e6fb5ba043c2db08cbacb119664297b3f3668517
old_file: fluent_comments/forms/_captcha.py
new_file: fluent_comments/forms/_captcha.py
old_contents:
    from django.core.exceptions import ImproperlyConfigured

    try:
        from captcha.fields import ReCaptchaField as CaptchaField
    except ImportError:
        try:
            from captcha.fields import CaptchaField
        except ImportError:
            raise ImportError(
                "To use the captcha contact form, you need to have "
                "django-recaptcha or django-simple-captcha installed."
            )

    class CaptchaFormMixin(object):
        def _reorder_fields(self, ordering):
            """
            Test that the 'captcha' field is really present.
            This could be broken by a bad FLUENT_COMMENTS_FIELD_ORDER configuration.
            """
            if 'captcha' not in ordering:
                raise ImproperlyConfigured(
                    "When using 'FLUENT_COMMENTS_FIELD_ORDER', "
                    "make sure the 'captcha' field included too to use '{}' form. ".format(
                        self.__class__.__name__
                    )
                )
            super(CaptchaFormMixin, self)._reorder_fields(ordering)

            # Avoid making captcha required for previews.
            if self.is_preview:
                self.fields.pop('captcha')
new_contents:
    from django.core.exceptions import ImproperlyConfigured

    class CaptchaFormMixin(object):
        def _reorder_fields(self, ordering):
            """
            Test that the 'captcha' field is really present.
            This could be broken by a bad FLUENT_COMMENTS_FIELD_ORDER configuration.
            """
            if 'captcha' not in ordering:
                raise ImproperlyConfigured(
                    "When using 'FLUENT_COMMENTS_FIELD_ORDER', "
                    "make sure the 'captcha' field included too to use '{}' form. ".format(
                        self.__class__.__name__
                    )
                )
            super(CaptchaFormMixin, self)._reorder_fields(ordering)

            # Avoid making captcha required for previews.
            if self.is_preview:
                self.fields.pop('captcha')
subject: Fix duplicate captcha import check
message: Fix duplicate captcha import check
lang: Python
license: apache-2.0
repos: django-fluent/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments,django-fluent/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments,edoburu/django-fluent-comments

commit: 7f1ddec9e170941e3a5159236ede817c2d569f38
old_file: graphical_tests/test_partition.py
new_file: graphical_tests/test_partition.py
old_contents:
    """
    Draw a solid circle off center in a blank image. Use the same array as
    both the image and the mask. The tiles should subdivide along the curved
    edge to trace out a smooth circle.
    """
    from skimage import draw
    import matplotlib.pyplot as plt
    import numpy as np
    import photomosaic as pm

    img = np.zeros((1000, 1000))
    rr, cc = draw.circle(300, 500, 150)
    img[rr, cc] = 1
    tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
    plt.imshow(pm.draw_tiles(img, tiles, color=0.5))
    plt.savefig('test-partition.png')
new_contents:
    """
    Draw a solid circle off center in a blank image. Use the same array as
    both the image and the mask. The tiles should subdivide along the curved
    edge to trace out a smooth circle.
    """
    from skimage import draw
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    import numpy as np
    import photomosaic as pm

    img = np.zeros((1000, 1000))
    rr, cc = draw.circle(300, 500, 150)
    img[rr, cc] = 1
    tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3)
    plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5))
    plt.savefig('test-partition.png')
subject: Update test to match API.
message: TST: Update test to match API.
lang: Python
license: bsd-3-clause
repos: danielballan/photomosaic
""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tiles(img, tiles, color=0.5)) plt.savefig('test-partition.png') TST: Update test to match API.
""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5)) plt.savefig('test-partition.png')
<commit_before>""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tiles(img, tiles, color=0.5)) plt.savefig('test-partition.png') <commit_msg>TST: Update test to match API.<commit_after>
""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5)) plt.savefig('test-partition.png')
""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tiles(img, tiles, color=0.5)) plt.savefig('test-partition.png') TST: Update test to match API.""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5)) plt.savefig('test-partition.png')
<commit_before>""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tiles(img, tiles, color=0.5)) plt.savefig('test-partition.png') <commit_msg>TST: Update test to match API.<commit_after>""" Draw a solid circle off center in a blank image. Use the same array as both the image and the mask. The tiles should subdivide along the curved edge to trace out a smooth circle. """ from skimage import draw import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy as np import photomosaic as pm img = np.zeros((1000, 1000)) rr, cc = draw.circle(300, 500, 150) img[rr, cc] = 1 tiles = pm.partition(img, (10, 10), mask=img.astype(bool), depth=3) plt.imshow(pm.draw_tile_layout(img, tiles, color=0.5)) plt.savefig('test-partition.png')

commit: 8e1e6624fb9120b3f26ac373dc48e877240cccac
old_file: bootcamp/lesson5.py
new_file: bootcamp/lesson5.py
old_contents:
    import datetime
    import csv

    # Question 1
    # ----------
    # Using the csv data file users.csv aggregate app users as well as registration date by month. The count of app
    # users should be one dictionary while the count of registration month should be another dictionary. There will be
    # no checking or test harness so simply print your results to the console.
    # Example:
    # d1 = {
    #     'RECEIPT_HOG': 3325,
    #     'SHOPAROO': 38,
    #     'RECEIPT_LOTTERY': 820,
    #     'RECEIPT_BIN': 3208
    # }
    # d2 = {
    #     'Jan': 3852,
    #     'Feb': 38525,
    #     etc...
    # }

    def bigdata():
        # Write code here
        pass

    def main():
        bigdata()

    if __name__ == '__main__':
        main()
new_contents:
    import datetime
    import csv

    # Question 1
    # ----------
    # Using the csv data file users.csv aggregate app users as well as registration date by month. The count of app
    # users should be one dictionary while the count of registration month should be another dictionary. There will be
    # no checking or test harness so simply print your results to the console in the display_results function.
    # Example:
    # d1 = {
    #     'RECEIPT_HOG': 3325,
    #     'SHOPAROO': 38,
    #     'RECEIPT_LOTTERY': 820,
    #     'RECEIPT_BIN': 3208
    # }
    # d2 = {
    #     'Jan': 3852,
    #     'Feb': 38525,
    #     etc...
    # }

    def bigdata():
        # Write code here
        pass

    def display_results(d1, d2):
        # Write code here
        pass

    def main():
        bigdata()

    if __name__ == '__main__':
        main()
subject: Add print function to demonstrate func calling
message: Add print function to demonstrate func calling
lang: Python
license: mit
repos: infoscout/python-bootcamp-pv

commit: 51c97b17220e8fc6334d2dce1fd945ee1861385e
old_file: healthcheck/utils.py
new_file: healthcheck/utils.py
old_contents:
    import errno
    import os

    def file_exists(path):
        """Return True if a file exists at `path` (even if it can't be read),
        otherwise False.

        This is different from os.path.isfile and os.path.exists which return
        False if a file exists but the user doesn't have permission to read it.
        """
        try:
            os.stat(path)
            return True
        except OSError as e:
            # Permission denied: someone chose the wrong permissions but it exists
            if e.errno == errno.EACCES:
                return True
            # File doesn't exist
            elif e.errno == errno.ENOENT:
                return False
            # Unknown case
            raise
new_contents:
    import errno
    import os

    def file_exists(path):
        """Return True if a file exists at `path` (even if it can't be read),
        otherwise False.

        This is different from os.path.isfile and os.path.exists which return
        False if a file exists but the user doesn't have permission to read it.
        """
        try:
            os.stat(path)
            return True
        except OSError as e:
            # Permission denied: someone chose the wrong permissions but it exists.
            if e.errno == errno.EACCES:
                return True
            # File doesn't exist
            elif e.errno == errno.ENOENT:
                return False
            # Unknown case
            raise
subject: Add period to end of long comment
message: Add period to end of long comment
lang: Python
license: mit
repos: yola/healthcheck

commit: 17ffc13ba4a5eab56b66b8fe144cc53e8d02d961
old_file: easyedit/TextArea.py
new_file: easyedit/TextArea.py
old_contents:
    from PyQt5.Qsci import QsciScintilla, QsciLexerPython

    class TextArea(QsciScintilla):
        def __init__(self):
            super().__init__()

            self.filePath = "Untitled"

            self.pythonLexer = QsciLexerPython(self)
            self.setLexer(self.pythonLexer)
            self.setMargins(1)
            self.setMarginType(0, QsciScintilla.NumberMargin)
            self.setUtf8(True)
            self.setIndentationsUseTabs(False)
            self.setTabWidth(4)
            self.setIndentationGuides(False)
            self.setAutoIndent(True)

        def changeMarginWidth(self):
            numLines = self.lines()
            print(len(str(numLines)))
            self.setMarginWidth(0, "00" * len(str(numLines)))

        def updateFont(self, newFont):
            self.lexer().setFont(newFont)
new_contents:
    from PyQt5.Qsci import QsciScintilla, QsciLexerPython

    class TextArea(QsciScintilla):
        def __init__(self):
            super().__init__()

            self.filePath = "Untitled"

            self.pythonLexer = QsciLexerPython(self)
            self.setLexer(self.pythonLexer)
            self.setMargins(1)
            self.setMarginType(0, QsciScintilla.NumberMargin)
            self.setUtf8(True)
            self.setIndentationsUseTabs(False)
            self.setTabWidth(4)
            self.setIndentationGuides(False)
            self.setAutoIndent(True)

        def changeMarginWidth(self):
            numLines = self.lines()
            self.setMarginWidth(0, "00" * len(str(numLines)))

        def updateFont(self, newFont):
            self.lexer().setFont(newFont)
subject: Remove debugging print statement from changeMarginWidth
message: Remove debugging print statement from changeMarginWidth
lang: Python
license: mit
repos: msklosak/EasyEdit

commit: 3f5b1e830eff73cdff013a423b647c795e21bef2
old_file: captcha/fields.py
new_file: captcha/fields.py
old_contents:
    from django.conf import settings
    from django import forms
    from django.utils.encoding import smart_unicode
    from django.utils.translation import ugettext_lazy as _
    from recaptcha.client import captcha

    from captcha.widgets import ReCaptcha

    class ReCaptchaField(forms.CharField):
        default_error_messages = {
            'captcha_invalid': _(u'Invalid captcha')
        }

        def __init__(self, *args, **kwargs):
            self.widget = ReCaptcha
            self.required = True
            super(ReCaptchaField, self).__init__(*args, **kwargs)

        def __init__(self, *args, **kwargs):
            self.widget = ReCaptcha
            self.required = True
            super(ReCaptchaField, self).__init__(*args, **kwargs)

        def clean(self, values):
            super(ReCaptchaField, self).clean(values[1])
            recaptcha_challenge_value = smart_unicode(values[0])
            recaptcha_response_value = smart_unicode(values[1])
            check_captcha = captcha.submit(recaptcha_challenge_value,
                recaptcha_response_value, settings.RECAPTCHA_PRIVATE_KEY, {})
            if self.required and not check_captcha.is_valid:
                raise forms.util.ValidationError(
                    self.error_messages['captcha_invalid'])
            return values[0]
new_contents:
    from django.conf import settings
    from django import forms
    from django.utils.encoding import smart_unicode
    from django.utils.translation import ugettext_lazy as _
    from recaptcha.client import captcha

    from captcha.widgets import ReCaptcha

    class ReCaptchaField(forms.CharField):
        default_error_messages = {
            'captcha_invalid': _(u'Invalid captcha')
        }

        def __init__(self, *args, **kwargs):
            self.widget = ReCaptcha
            self.required = True
            super(ReCaptchaField, self).__init__(*args, **kwargs)

        def __init__(self, *args, **kwargs):
            self.widget = ReCaptcha
            self.required = True
            super(ReCaptchaField, self).__init__(*args, **kwargs)

        def clean(self, values):
            super(ReCaptchaField, self).clean(values[1])
            recaptcha_challenge_value = smart_unicode(values[0])
            recaptcha_response_value = smart_unicode(values[1])
            check_captcha = captcha.submit(recaptcha_challenge_value,
                recaptcha_response_value, settings.RECAPTCHA_PRIVATE_KEY, {})
            if not check_captcha.is_valid:
                raise forms.util.ValidationError(
                    self.error_messages['captcha_invalid'])
            return values[0]
subject: Revert "Enforce valid captcha only if required, so tests can relax captcha requirement"
message: Revert "Enforce valid captcha only if required, so tests can relax captcha requirement" This reverts commit c3c450b1a7070a1dd1b808e55371e838dd297857. It wasn't such a great idea.
lang: Python
license: bsd-3-clause
repos: mozilla/django-recaptcha

commit: b48984747d0f33f8ad9a8721bf7489d8ff97c157
old_file: matador/commands/deploy_ticket.py
new_file: matador/commands/deploy_ticket.py
old_contents:
    #!/usr/bin/env python

    from .command import Command
    from matador import utils

    class DeployTicket(Command):

        def _add_arguments(self, parser):
            parser.prog = 'matador deploy-ticket'
            parser.add_argument(
                '-e', '--environment',
                type=str,
                required=True,
                help='Agresso environment name')

        def _execute(self):
            project = utils.project()
            utils.update_repository(project)
new_contents:
    #!/usr/bin/env python

    from .command import Command
    from matador import utils

    class DeployTicket(Command):

        def _add_arguments(self, parser):
            parser.prog = 'matador deploy-ticket'
            parser.add_argument(
                '-e', '--environment',
                type=str,
                required=True,
                help='Agresso environment name')
            parser.add_argument(
                '-', '--package',
                type=bool,
                default=False,
                help='Agresso environment name')

        def _execute(self):
            project = utils.project()
            if not self.args.package:
                utils.update_repository(project)
subject: Add package argument to deploy-ticket
message: Add package argument to deploy-ticket
lang: Python
license: mit
repos: Empiria/matador

commit: f6ce19f558bd298a6f0651ead865b65da3a2c479
old_file: spiff/sensors/tests.py
new_file: spiff/sensors/tests.py
old_contents:
    from django.test import TestCase
    from spiff.api.tests import APITestMixin, withPermission
    import models

    class SensorTest(APITestMixin):
        def setUp(self):
            self.setupAPI()
            self.sensor = models.Sensor.objects.create(
                name = 'sensor',
                description = 'Test sensor',
                type = 0,
                ttl = 255
            )

        @withPermission('sensors.read_sensor')
        @withPermission('sensors.update_value_on_sensor')
        def testSetSensorValue(self):
            self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), {
                'value': True,
            })
            self.assertEqual(self.sensor.value(), 'True')
            self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), {
                'value': False,
            })
            self.assertEqual(self.sensor.value(), 'False')

        def testSensorTTL(self):
            for i in range(0, self.sensor.ttl*2):
                models.SensorValue.objects.create(
                    sensor=self.sensor,
                    value=True
                )
            self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl)
new_contents:
    from django.test import TestCase
    from spiff.api.tests import APITestMixin, withPermission
    import models

    class SensorTest(APITestMixin):
        def setUp(self):
            self.setupAPI()
            self.sensor = models.Sensor.objects.create(
                name = 'sensor',
                description = 'Test sensor',
                type = models.SENSOR_TYPE_BOOLEAN,
                ttl = 255
            )

        @withPermission('sensors.read_sensor')
        @withPermission('sensors.update_value_on_sensor')
        def testSetSensorValue(self):
            self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), {
                'value': True,
            })
            sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id))
            self.assertEqual(sensor['value'], True)
            self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), {
                'value': False,
            })
            sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id))
            self.assertEqual(sensor['value'], False)

        def testSensorTTL(self):
            for i in range(0, self.sensor.ttl*2):
                models.SensorValue.objects.create(
                    sensor=self.sensor,
                    value=True
                )
            self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl)
subject: Use a bool sensor for testing, and query the API instead of models
message: Use a bool sensor for testing, and query the API instead of models
lang: Python
license: agpl-3.0
repos: SYNHAK/spiff,SYNHAK/spiff,SYNHAK/spiff
from django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = 0, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) self.assertEqual(self.sensor.value(), 'True') self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) self.assertEqual(self.sensor.value(), 'False') def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl) Use a bool sensor for testing, and query the API instead of models
from django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = models.SENSOR_TYPE_BOOLEAN, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], True) self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], False) def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl)
<commit_before>from django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = 0, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) self.assertEqual(self.sensor.value(), 'True') self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) self.assertEqual(self.sensor.value(), 'False') def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl) <commit_msg>Use a bool sensor for testing, and query the API instead of models<commit_after>
from django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = models.SENSOR_TYPE_BOOLEAN, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], True) self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], False) def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl)
from django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = 0, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) self.assertEqual(self.sensor.value(), 'True') self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) self.assertEqual(self.sensor.value(), 'False') def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl) Use a bool sensor for testing, and query the API instead of modelsfrom django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = models.SENSOR_TYPE_BOOLEAN, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], True) self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], False) def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl)
<commit_before>from django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = 0, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) self.assertEqual(self.sensor.value(), 'True') self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) self.assertEqual(self.sensor.value(), 'False') def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl) <commit_msg>Use a bool sensor for testing, and query the API instead of models<commit_after>from django.test import TestCase from spiff.api.tests import APITestMixin, withPermission import models class SensorTest(APITestMixin): def setUp(self): self.setupAPI() self.sensor = models.Sensor.objects.create( name = 'sensor', description = 'Test sensor', type = models.SENSOR_TYPE_BOOLEAN, ttl = 255 ) @withPermission('sensors.read_sensor') @withPermission('sensors.update_value_on_sensor') def testSetSensorValue(self): self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': True, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], True) self.patchAPI('/v1/sensor/%s/'%(self.sensor.id), { 'value': False, }) sensor = self.getAPI('/v1/sensor/%s/'%(self.sensor.id)) self.assertEqual(sensor['value'], False) def testSensorTTL(self): for i in range(0, self.sensor.ttl*2): models.SensorValue.objects.create( sensor=self.sensor, value=True ) self.assertEqual(len(self.sensor.values.all()), self.sensor.ttl)
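An aside on the pattern in this record: the old test compared the model helper's return value to the string 'True', while the rewritten test reads the value back through the API, where the boolean survives the JSON round trip. A minimal stdlib sketch of the difference; the dict below is a hypothetical stand-in for the API response, not spiff's actual payload.

import json

stored = str(True)                  # what the model layer handed back: the string 'True'
assert stored == 'True'             # passes, but couples the test to str() formatting

payload = json.dumps({'value': True})           # what a JSON API response carries
assert json.loads(payload)['value'] is True     # type-preserving boolean check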
600fe835ddce18a6aec5702766350003f2f90745
gen-android-icons.py
gen-android-icons.py
__author__ = 'Maksim Dmitriev' import argparse if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--foo', help='foo help', required=True) args = parser.parse_args()
__author__ = 'Maksim Dmitriev' import argparse import os if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-s', '--source', help='the icon to be resized', required=True) parser.add_argument('-d', '--dest', help='the directory where resized icons are saved') parser.add_argument('-f', '--outfile', help='the output file names') args = parser.parse_args() source_image = args.source dest_dir = args.dest if dest_dir is None: os.makedirs(os.path.dirname(os.path.realpath(source_image)) + os.sep + 'out', exist_ok=True)
Create an output directory unless it exists
Create an output directory unless it exists
Python
bsd-3-clause
MaksimDmitriev/Python-Scripts
__author__ = 'Maksim Dmitriev' import argparse if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--foo', help='foo help', required=True) args = parser.parse_args() Create an output directory unless it exists
__author__ = 'Maksim Dmitriev' import argparse import os if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-s', '--source', help='the icon to be resized', required=True) parser.add_argument('-d', '--dest', help='the directory where resized icons are saved') parser.add_argument('-f', '--outfile', help='the output file names') args = parser.parse_args() source_image = args.source dest_dir = args.dest if dest_dir is None: os.makedirs(os.path.dirname(os.path.realpath(source_image)) + os.sep + 'out', exist_ok=True)
<commit_before>__author__ = 'Maksim Dmitriev' import argparse if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--foo', help='foo help', required=True) args = parser.parse_args() <commit_msg>Create an output directory unless it exists<commit_after>
__author__ = 'Maksim Dmitriev' import argparse import os if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-s', '--source', help='the icon to be resized', required=True) parser.add_argument('-d', '--dest', help='the directory where resized icons are saved') parser.add_argument('-f', '--outfile', help='the output file names') args = parser.parse_args() source_image = args.source dest_dir = args.dest if dest_dir is None: os.makedirs(os.path.dirname(os.path.realpath(source_image)) + os.sep + 'out', exist_ok=True)
__author__ = 'Maksim Dmitriev' import argparse if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--foo', help='foo help', required=True) args = parser.parse_args() Create an output directory unless it exists__author__ = 'Maksim Dmitriev' import argparse import os if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-s', '--source', help='the icon to be resized', required=True) parser.add_argument('-d', '--dest', help='the directory where resized icons are saved') parser.add_argument('-f', '--outfile', help='the output file names') args = parser.parse_args() source_image = args.source dest_dir = args.dest if dest_dir is None: os.makedirs(os.path.dirname(os.path.realpath(source_image)) + os.sep + 'out', exist_ok=True)
<commit_before>__author__ = 'Maksim Dmitriev' import argparse if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--foo', help='foo help', required=True) args = parser.parse_args() <commit_msg>Create an output directory unless it exists<commit_after>__author__ = 'Maksim Dmitriev' import argparse import os if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-s', '--source', help='the icon to be resized', required=True) parser.add_argument('-d', '--dest', help='the directory where resized icons are saved') parser.add_argument('-f', '--outfile', help='the output file names') args = parser.parse_args() source_image = args.source dest_dir = args.dest if dest_dir is None: os.makedirs(os.path.dirname(os.path.realpath(source_image)) + os.sep + 'out', exist_ok=True)
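A standalone sketch of the directory-creation idiom this commit introduces; the path is hypothetical. os.makedirs(..., exist_ok=True) (Python 3.2+) behaves like an idempotent mkdir -p, so re-running the script once 'out' exists does not raise. os.path.join is used here as a portable equivalent of the os.sep concatenation in the commit.

import os

source_image = 'icons/launcher.png'    # hypothetical input icon
out_dir = os.path.join(os.path.dirname(os.path.realpath(source_image)), 'out')
os.makedirs(out_dir, exist_ok=True)    # no-op if the directory already exists
print(out_dir)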
195bcf4b834794642a502ce876f023025c91a690
savate/buffer_event.py
savate/buffer_event.py
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = buffer_slice(self.buffer_queue[0], sent_bytes, -1) else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
Use buffer() to try and avoid memory copies
Use buffer() to try and avoid memory copies
Python
agpl-3.0
noirbee/savate,noirbee/savate
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes Use buffer() to try and avoid memory copies
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = buffer_slice(self.buffer_queue[0], sent_bytes, -1) else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
<commit_before># -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes <commit_msg>Use buffer() to try and avoid memory copies<commit_after>
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = buffer_slice(self.buffer_queue[0], sent_bytes, -1) else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes Use buffer() to try and avoid memory copies# -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = buffer_slice(self.buffer_queue[0], sent_bytes, -1) else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
<commit_before># -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = self.buffer_queue[0][sent_bytes:] else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes <commit_msg>Use buffer() to try and avoid memory copies<commit_after># -*- coding: utf-8 -*- import errno import collections from savate import writev # FIXME: should this be a method of BufferEvent below ? # FIXME: handle Python2.x/Python3k compat here def buffer_slice(buff, offset, size): return buffer(buff, offset, size) class BufferOutputHandler(object): def __init__(self, sock, initial_buffer_queue = ()): self.sock = sock self.ready = True self.buffer_queue = collections.deque(initial_buffer_queue) def add_buffer(self, buff): self.buffer_queue.append(buff) def empty(self): return len(self.buffer_queue) == 0 def flush(self): self.ready = True total_sent_bytes = 0 try: while self.buffer_queue: sent_bytes = self.sock.send(self.buffer_queue[0]) total_sent_bytes += sent_bytes if sent_bytes < len(self.buffer_queue[0]): # One of the buffers was partially sent self.buffer_queue[0] = buffer_slice(self.buffer_queue[0], sent_bytes, -1) else: self.buffer_queue.popleft() except IOError, exc: if exc.errno == errno.EAGAIN: self.ready = False else: raise return total_sent_bytes
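This commit swaps a plain slice, which copies the unsent tail of the buffer on every partial send, for Python 2's buffer() object, which merely references the original memory (the -1 size passed to buffer_slice is CPython 2's "to the end of the object" sentinel). A small sketch of the same zero-copy idea using memoryview, the Python 3 counterpart; the byte string is arbitrary.

data = b'x' * 1024
view = memoryview(data)
sent = 100                     # pretend send() wrote the first 100 bytes
tail = view[sent:]             # a new view over the same memory, no copy made
assert len(tail) == 924
assert tail.obj is data        # still backed by the original bytes object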
90fb352f313b26964adcc587beb8f21deb3395a4
tor.py
tor.py
import socks import socket from stem.control import Controller from stem import Signal class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM)
import socks import socket import json from stem.control import Controller from stem import Signal from urllib2 import urlopen class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) def print_ip(): """print ip for debug""" json_str = urlopen('http://ip-api.com/json').read().decode('UTF-8') ip_dict = json.loads(json_str) for key, value in ip_dict.items(): print("%s: %s" % (key, value))
Add print_ip method for debugging
Add print_ip method for debugging
Python
mit
MA3STR0/simpletor
import socks import socket from stem.control import Controller from stem import Signal class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) Add print_ip method for debugging
import socks import socket import json from stem.control import Controller from stem import Signal from urllib2 import urlopen class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) def print_ip(): """print ip for debug""" json_str = urlopen('http://ip-api.com/json').read().decode('UTF-8') ip_dict = json.loads(json_str) for key, value in ip_dict.items(): print("%s: %s" % (key, value))
<commit_before>import socks import socket from stem.control import Controller from stem import Signal class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) <commit_msg>Add print_ip method for debugging<commit_after>
import socks import socket import json from stem.control import Controller from stem import Signal from urllib2 import urlopen class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) def print_ip(): """print ip for debug""" json_str = urlopen('http://ip-api.com/json').read().decode('UTF-8') ip_dict = json.loads(json_str) for key, value in ip_dict.items(): print("%s: %s" % (key, value))
import socks import socket from stem.control import Controller from stem import Signal class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) Add print_ip method for debuggingimport socks import socket import json from stem.control import Controller from stem import Signal from urllib2 import urlopen class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) def print_ip(): """print ip for debug""" json_str = urlopen('http://ip-api.com/json').read().decode('UTF-8') ip_dict = json.loads(json_str) for key, value in ip_dict.items(): print("%s: %s" % (key, value))
<commit_before>import socks import socket from stem.control import Controller from stem import Signal class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) <commit_msg>Add print_ip method for debugging<commit_after>import socks import socket import json from stem.control import Controller from stem import Signal from urllib2 import urlopen class Tor(object): """Tor class for socks proxy and controller""" def __init__(self, socks_port=9050, control_port=9051, control_password=""): self.socks_port = socks_port self.control_port = control_port self.control_password = control_password self.default_socket = socket.socket def connect(self): """connect to Tor socks proxy""" socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", self.socks_port) socket.socket = socks.socksocket def disconnect(self): """disconnect Tor socks proxy""" socket.socket = self.default_socket def change_relay(self): """change Tor relay to obtain new ip""" with Controller.from_port(port=self.control_port) as controller: controller.authenticate(self.control_password) controller.signal(Signal.NEWNYM) def print_ip(): """print ip for debug""" json_str = urlopen('http://ip-api.com/json').read().decode('UTF-8') ip_dict = json.loads(json_str) for key, value in ip_dict.items(): print("%s: %s" % (key, value))
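One detail worth flagging in the new code: print_ip is defined inside the Tor class but takes no self parameter, so calling it on an instance would raise a TypeError; as written it only works if moved to module level. A hypothetical module-level variant, ported to Python 3's urllib (the commit itself targets Python 2's urllib2), hitting the same ip-api.com endpoint:

import json
from urllib.request import urlopen

def print_ip():
    """Fetch and print the current external IP details (debug helper)."""
    raw = urlopen('http://ip-api.com/json').read().decode('utf-8')
    for key, value in json.loads(raw).items():
        print('%s: %s' % (key, value))

print_ip()   # prints fields such as query, country, isp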
a52039c139e0d5b1c5fedb1dfb160e2c9b9387b3
teuthology/task/tests/test_locking.py
teuthology/task/tests/test_locking.py
import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type
import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') if ctx.config.get("os_type") == "debian": pytest.skip('known issue with debian versions; see: issue #10878') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type
Make an exception for debian in tests.test_correct_os_version
Make an exception for debian in tests.test_correct_os_version This is because of a known issue where downburst gives us 7.1 when we ask for 7.0. We're ok with this behavior for now. See: issue #10878 Signed-off-by: Andrew Schoen <1bb641dc23c3a93cce4eee683bcf4b2bea7903a3@redhat.com>
Python
mit
SUSE/teuthology,yghannam/teuthology,dmick/teuthology,caibo2014/teuthology,dreamhost/teuthology,caibo2014/teuthology,t-miyamae/teuthology,ivotron/teuthology,SUSE/teuthology,ceph/teuthology,ivotron/teuthology,ktdreyer/teuthology,SUSE/teuthology,dmick/teuthology,t-miyamae/teuthology,robbat2/teuthology,zhouyuan/teuthology,ceph/teuthology,dmick/teuthology,michaelsevilla/teuthology,dreamhost/teuthology,zhouyuan/teuthology,robbat2/teuthology,ktdreyer/teuthology,yghannam/teuthology,michaelsevilla/teuthology
import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type Make an exception for debian in tests.test_correct_os_version This is because of a known issue where downburst gives us 7.1 when we ask for 7.0. We're ok with this behavior for now. See: issue #10878 Signed-off-by: Andrew Schoen <1bb641dc23c3a93cce4eee683bcf4b2bea7903a3@redhat.com>
import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') if ctx.config.get("os_type") == "debian": pytest.skip('known issue with debian versions; see: issue #10878') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type
<commit_before>import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type <commit_msg>Make an exception for debian in tests.test_correct_os_version This is because of a known issue where downburst gives us 7.1 when we ask for 7.0. We're ok with this behavior for now. See: issue #10878 Signed-off-by: Andrew Schoen <1bb641dc23c3a93cce4eee683bcf4b2bea7903a3@redhat.com><commit_after>
import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') if ctx.config.get("os_type") == "debian": pytest.skip('known issue with debian versions; see: issue #10878') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type
import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type Make an exception for debian in tests.test_correct_os_version This is because of a known issue where downburst gives us 7.1 when we ask for 7.0. We're ok with this behavior for now. See: issue #10878 Signed-off-by: Andrew Schoen <1bb641dc23c3a93cce4eee683bcf4b2bea7903a3@redhat.com>import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') if ctx.config.get("os_type") == "debian": pytest.skip('known issue with debian versions; see: issue #10878') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type
<commit_before>import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type <commit_msg>Make an exception for debian in tests.test_correct_os_version This is because of a known issue where downburst gives us 7.1 when we ask for 7.0. We're ok with this behavior for now. See: issue #10878 Signed-off-by: Andrew Schoen <1bb641dc23c3a93cce4eee683bcf4b2bea7903a3@redhat.com><commit_after>import pytest class TestLocking(object): def test_correct_os_type(self, ctx, config): os_type = ctx.config.get("os_type") if os_type is None: pytest.skip('os_type was not defined') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.name == os_type def test_correct_os_version(self, ctx, config): os_version = ctx.config.get("os_version") if os_version is None: pytest.skip('os_version was not defined') if ctx.config.get("os_type") == "debian": pytest.skip('known issue with debian versions; see: issue #10878') for remote in ctx.cluster.remotes.iterkeys(): assert remote.os.version == os_version def test_correct_machine_type(self, ctx, config): machine_type = ctx.machine_type for remote in ctx.cluster.remotes.iterkeys(): assert remote.machine_type in machine_type
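The guard added here relies on pytest.skip, which aborts the current test and reports it as skipped rather than failed. A minimal runnable illustration of the same conditional-skip pattern; the config dict is a hypothetical stand-in for teuthology's ctx.config.

import pytest

config = {'os_type': 'debian', 'os_version': '7.0'}   # hypothetical job config

def test_correct_os_version():
    if config.get('os_version') is None:
        pytest.skip('os_version was not defined')
    if config.get('os_type') == 'debian':
        pytest.skip('known issue with debian versions; see: issue #10878')
    assert config['os_version'] == '7.0'   # never reached for debian jobs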
6fb605a46a9dd33a2f31128955ca79e44379ad4d
main.py
main.py
# Copyright 2015, Google, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use # this file except in compliance with the License. You may obtain a copy of the # License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable # law or agreed to in writing, software distributed under the License is distributed # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions and # limitations under the License. import urllib2 import json from google.appengine.ext import vendor vendor.add('lib') from flask import Flask app = Flask(__name__) from api_key import key @app.route('/get_author/<title>') def get_author(title): host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, api_key) request = urllib2.Request(host) response = urllib2.urlopen(request) html = response.read() author = json.loads(html)['items'][0]['volumeInfo']['authors'][0] return author if __name__ == '__main__': app.run(debug=True)
# Copyright 2015, Google, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use # this file except in compliance with the License. You may obtain a copy of the # License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable # law or agreed to in writing, software distributed under the License is distributed # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions and # limitations under the License. import urllib2 import json from google.appengine.ext import vendor vendor.add('lib') from flask import Flask app = Flask(__name__) from api_key import key @app.route('/get_author/<title>') def get_author(title): host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, key) request = urllib2.Request(host) response = urllib2.urlopen(request) html = response.read() author = json.loads(html)['items'][0]['volumeInfo']['authors'][0] return author if __name__ == '__main__': app.run(debug=True)
Fix api key call to be renamed key
Fix api key call to be renamed key
Python
apache-2.0
googlearchive/appengine-python-vm-hello,bshaffer/appengine-python-vm-hello,bshaffer/appengine-python-vm-hello,googlearchive/appengine-python-vm-hello
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, api_key)
    request = urllib2.Request(host)
    response = urllib2.urlopen(request)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author

if __name__ == '__main__':
    app.run(debug=True)
Fix api key call to be renamed key
# Copyright 2015, Google, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use # this file except in compliance with the License. You may obtain a copy of the # License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable # law or agreed to in writing, software distributed under the License is distributed # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions and # limitations under the License. import urllib2 import json from google.appengine.ext import vendor vendor.add('lib') from flask import Flask app = Flask(__name__) from api_key import key @app.route('/get_author/<title>') def get_author(title): host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, key) request = urllib2.Request(host) response = urllib2.urlopen(request) html = response.read() author = json.loads(html)['items'][0]['volumeInfo']['authors'][0] return author if __name__ == '__main__': app.run(debug=True)
<commit_before># Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, api_key)
    request = urllib2.Request(host)
    response = urllib2.urlopen(request)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author

if __name__ == '__main__':
    app.run(debug=True)
<commit_msg>Fix api key call to be renamed key<commit_after>
# Copyright 2015, Google, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use # this file except in compliance with the License. You may obtain a copy of the # License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable # law or agreed to in writing, software distributed under the License is distributed # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions and # limitations under the License. import urllib2 import json from google.appengine.ext import vendor vendor.add('lib') from flask import Flask app = Flask(__name__) from api_key import key @app.route('/get_author/<title>') def get_author(title): host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, key) request = urllib2.Request(host) response = urllib2.urlopen(request) html = response.read() author = json.loads(html)['items'][0]['volumeInfo']['authors'][0] return author if __name__ == '__main__': app.run(debug=True)
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, api_key)
    request = urllib2.Request(host)
    response = urllib2.urlopen(request)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author

if __name__ == '__main__':
    app.run(debug=True)
Fix api key call to be renamed key# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, key)
    request = urllib2.Request(host)
    response = urllib2.urlopen(request)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author

if __name__ == '__main__':
    app.run(debug=True)
<commit_before># Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, api_key)
    request = urllib2.Request(host)
    response = urllib2.urlopen(request)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author

if __name__ == '__main__':
    app.run(debug=True)
<commit_msg>Fix api key call to be renamed key<commit_after># Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')
from flask import Flask
app = Flask(__name__)
from api_key import key


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, key)
    request = urllib2.Request(host)
    response = urllib2.urlopen(request)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author

if __name__ == '__main__':
    app.run(debug=True)
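The one-word fix in this record repairs a NameError: from api_key import key binds only the name key in the importing module, so the later reference to api_key was undefined. The binding rule can be shown standalone with the stdlib, with no hypothetical module required.

from os import path

print(path.sep)             # 'path' was bound by the from-import
try:
    os                      # 'os' itself was never bound
except NameError as exc:
    print(exc)              # name 'os' is not defined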
424162d2dc8a7c815c48946f4963561be20df62d
config_context.py
config_context.py
# A context carrying the loaded configuration. from libcloud.compute.types import Provider from libcloud.compute.providers import get_driver import config class ConfigContext(object): def __init__(self, config): self._config = config self._cloud_workstations = {} self._volumes_by_cws = {} self._build_indexes() # Only support GCE for now assert config.provider == 'GCE' ComputeEngine = get_driver(Provider.GCE) self._driver = ComputeEngine('', '', project=self._config.project) def _build_indexes(self): for cws in self._config.cloud_workstations: self._cloud_workstations[cws.name] = cws for v in cws.volumes: self._volumes[(cws.name, v.name)] = v def driver(self): return self._driver def get_cws(self, cws_name): return self._cloud_workstations[cws_name] def get_volume(self, cws_name, volume_name): return self._volumes[(cws_name, volume_name)] def get_volumes(self, cws_name): # Returned volumes order is non-deterministic. This will have # to do for now. volumes = [] for (name, _), v in self._volumes.iteritems(): if name == cws_name: volumes.append(v) return volumes
# A context carrying the loaded configuration. from libcloud.compute.types import Provider from libcloud.compute.providers import get_driver import config class ConfigContext(object): def __init__(self, config): self._config = config self._cloud_workstations = {} self._volumes = {} self._build_indexes() # Only support GCE for now assert config.provider == 'GCE' ComputeEngine = get_driver(Provider.GCE) self._driver = ComputeEngine('', '', project=self._config.project) def _build_indexes(self): for cws in self._config.cloud_workstations: self._cloud_workstations[cws.name] = cws for v in cws.volumes: self._volumes[(cws.name, v.name)] = v def driver(self): return self._driver def get_cws(self, cws_name): return self._cloud_workstations[cws_name] def get_volume(self, cws_name, volume_name): return self._volumes[(cws_name, volume_name)] def get_volumes(self, cws_name): # Returned volumes order is non-deterministic. This will have # to do for now. volumes = [] for (name, _), v in self._volumes.iteritems(): if name == cws_name: volumes.append(v) return volumes
Revert unintended change. Fixes bug.
Revert unintended change. Fixes bug.
Python
mit
fhltang/chews,fhltang/chews
# A context carrying the loaded configuration.

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

import config


class ConfigContext(object):
    def __init__(self, config):
        self._config = config
        self._cloud_workstations = {}
        self._volumes_by_cws = {}
        self._build_indexes()

        # Only support GCE for now
        assert config.provider == 'GCE'
        ComputeEngine = get_driver(Provider.GCE)
        self._driver = ComputeEngine('', '', project=self._config.project)

    def _build_indexes(self):
        for cws in self._config.cloud_workstations:
            self._cloud_workstations[cws.name] = cws
            for v in cws.volumes:
                self._volumes[(cws.name, v.name)] = v

    def driver(self):
        return self._driver

    def get_cws(self, cws_name):
        return self._cloud_workstations[cws_name]

    def get_volume(self, cws_name, volume_name):
        return self._volumes[(cws_name, volume_name)]

    def get_volumes(self, cws_name):
        # Returned volumes order is non-deterministic. This will have
        # to do for now.
        volumes = []
        for (name, _), v in self._volumes.iteritems():
            if name == cws_name:
                volumes.append(v)
        return volumes
Revert unintended change. Fixes bug.
# A context carrying the loaded configuration. from libcloud.compute.types import Provider from libcloud.compute.providers import get_driver import config class ConfigContext(object): def __init__(self, config): self._config = config self._cloud_workstations = {} self._volumes = {} self._build_indexes() # Only support GCE for now assert config.provider == 'GCE' ComputeEngine = get_driver(Provider.GCE) self._driver = ComputeEngine('', '', project=self._config.project) def _build_indexes(self): for cws in self._config.cloud_workstations: self._cloud_workstations[cws.name] = cws for v in cws.volumes: self._volumes[(cws.name, v.name)] = v def driver(self): return self._driver def get_cws(self, cws_name): return self._cloud_workstations[cws_name] def get_volume(self, cws_name, volume_name): return self._volumes[(cws_name, volume_name)] def get_volumes(self, cws_name): # Returned volumes order is non-deterministic. This will have # to do for now. volumes = [] for (name, _), v in self._volumes.iteritems(): if name == cws_name: volumes.append(v) return volumes
<commit_before># A context carrying the loaded configuration.

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

import config


class ConfigContext(object):
    def __init__(self, config):
        self._config = config
        self._cloud_workstations = {}
        self._volumes_by_cws = {}
        self._build_indexes()

        # Only support GCE for now
        assert config.provider == 'GCE'
        ComputeEngine = get_driver(Provider.GCE)
        self._driver = ComputeEngine('', '', project=self._config.project)

    def _build_indexes(self):
        for cws in self._config.cloud_workstations:
            self._cloud_workstations[cws.name] = cws
            for v in cws.volumes:
                self._volumes[(cws.name, v.name)] = v

    def driver(self):
        return self._driver

    def get_cws(self, cws_name):
        return self._cloud_workstations[cws_name]

    def get_volume(self, cws_name, volume_name):
        return self._volumes[(cws_name, volume_name)]

    def get_volumes(self, cws_name):
        # Returned volumes order is non-deterministic. This will have
        # to do for now.
        volumes = []
        for (name, _), v in self._volumes.iteritems():
            if name == cws_name:
                volumes.append(v)
        return volumes
<commit_msg>Revert unintended change. Fixes bug.<commit_after>
# A context carrying the loaded configuration. from libcloud.compute.types import Provider from libcloud.compute.providers import get_driver import config class ConfigContext(object): def __init__(self, config): self._config = config self._cloud_workstations = {} self._volumes = {} self._build_indexes() # Only support GCE for now assert config.provider == 'GCE' ComputeEngine = get_driver(Provider.GCE) self._driver = ComputeEngine('', '', project=self._config.project) def _build_indexes(self): for cws in self._config.cloud_workstations: self._cloud_workstations[cws.name] = cws for v in cws.volumes: self._volumes[(cws.name, v.name)] = v def driver(self): return self._driver def get_cws(self, cws_name): return self._cloud_workstations[cws_name] def get_volume(self, cws_name, volume_name): return self._volumes[(cws_name, volume_name)] def get_volumes(self, cws_name): # Returned volumes order is non-deterministic. This will have # to do for now. volumes = [] for (name, _), v in self._volumes.iteritems(): if name == cws_name: volumes.append(v) return volumes
# A context carrying the loaded configuration.

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

import config


class ConfigContext(object):
    def __init__(self, config):
        self._config = config
        self._cloud_workstations = {}
        self._volumes_by_cws = {}
        self._build_indexes()

        # Only support GCE for now
        assert config.provider == 'GCE'
        ComputeEngine = get_driver(Provider.GCE)
        self._driver = ComputeEngine('', '', project=self._config.project)

    def _build_indexes(self):
        for cws in self._config.cloud_workstations:
            self._cloud_workstations[cws.name] = cws
            for v in cws.volumes:
                self._volumes[(cws.name, v.name)] = v

    def driver(self):
        return self._driver

    def get_cws(self, cws_name):
        return self._cloud_workstations[cws_name]

    def get_volume(self, cws_name, volume_name):
        return self._volumes[(cws_name, volume_name)]

    def get_volumes(self, cws_name):
        # Returned volumes order is non-deterministic. This will have
        # to do for now.
        volumes = []
        for (name, _), v in self._volumes.iteritems():
            if name == cws_name:
                volumes.append(v)
        return volumes
Revert unintended change. Fixes bug.# A context carrying the loaded configuration.

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

import config


class ConfigContext(object):
    def __init__(self, config):
        self._config = config
        self._cloud_workstations = {}
        self._volumes = {}
        self._build_indexes()

        # Only support GCE for now
        assert config.provider == 'GCE'
        ComputeEngine = get_driver(Provider.GCE)
        self._driver = ComputeEngine('', '', project=self._config.project)

    def _build_indexes(self):
        for cws in self._config.cloud_workstations:
            self._cloud_workstations[cws.name] = cws
            for v in cws.volumes:
                self._volumes[(cws.name, v.name)] = v

    def driver(self):
        return self._driver

    def get_cws(self, cws_name):
        return self._cloud_workstations[cws_name]

    def get_volume(self, cws_name, volume_name):
        return self._volumes[(cws_name, volume_name)]

    def get_volumes(self, cws_name):
        # Returned volumes order is non-deterministic. This will have
        # to do for now.
        volumes = []
        for (name, _), v in self._volumes.iteritems():
            if name == cws_name:
                volumes.append(v)
        return volumes
<commit_before># A context carrying the loaded configuration.

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

import config


class ConfigContext(object):
    def __init__(self, config):
        self._config = config
        self._cloud_workstations = {}
        self._volumes_by_cws = {}
        self._build_indexes()

        # Only support GCE for now
        assert config.provider == 'GCE'
        ComputeEngine = get_driver(Provider.GCE)
        self._driver = ComputeEngine('', '', project=self._config.project)

    def _build_indexes(self):
        for cws in self._config.cloud_workstations:
            self._cloud_workstations[cws.name] = cws
            for v in cws.volumes:
                self._volumes[(cws.name, v.name)] = v

    def driver(self):
        return self._driver

    def get_cws(self, cws_name):
        return self._cloud_workstations[cws_name]

    def get_volume(self, cws_name, volume_name):
        return self._volumes[(cws_name, volume_name)]

    def get_volumes(self, cws_name):
        # Returned volumes order is non-deterministic. This will have
        # to do for now.
        volumes = []
        for (name, _), v in self._volumes.iteritems():
            if name == cws_name:
                volumes.append(v)
        return volumes
<commit_msg>Revert unintended change. Fixes bug.<commit_after># A context carrying the loaded configuration.

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

import config


class ConfigContext(object):
    def __init__(self, config):
        self._config = config
        self._cloud_workstations = {}
        self._volumes = {}
        self._build_indexes()

        # Only support GCE for now
        assert config.provider == 'GCE'
        ComputeEngine = get_driver(Provider.GCE)
        self._driver = ComputeEngine('', '', project=self._config.project)

    def _build_indexes(self):
        for cws in self._config.cloud_workstations:
            self._cloud_workstations[cws.name] = cws
            for v in cws.volumes:
                self._volumes[(cws.name, v.name)] = v

    def driver(self):
        return self._driver

    def get_cws(self, cws_name):
        return self._cloud_workstations[cws_name]

    def get_volume(self, cws_name, volume_name):
        return self._volumes[(cws_name, volume_name)]

    def get_volumes(self, cws_name):
        # Returned volumes order is non-deterministic. This will have
        # to do for now.
        volumes = []
        for (name, _), v in self._volumes.iteritems():
            if name == cws_name:
                volumes.append(v)
        return volumes
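The revert in this record realigns the attribute created in __init__ with the one _build_indexes writes to; with the stray _volumes_by_cws rename in place, the first indexing assignment raises AttributeError as soon as a workstation has volumes. A minimal hypothetical reproduction of that failure mode:

class Indexed(object):
    def __init__(self):
        self._items_by_key = {}     # attribute actually created
        self._build()

    def _build(self):
        self._items['k'] = 1        # attribute the rest of the code expects

try:
    Indexed()
except AttributeError as exc:
    print(exc)   # 'Indexed' object has no attribute '_items'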
ab1e27b3b28e0463f6dd182037a265c9f69fb94f
src/azure/cli/commands/resourcegroup.py
src/azure/cli/commands/resourcegroup.py
from msrest import Serializer from ..commands import command, description from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = ResourceManagementClient( ResourceManagementClientConfiguration(*Profile().get_login_credentials())) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable
from msrest import Serializer from ..commands import command, description from ._command_creation import get_service_client from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable
Use service client factory method for resource group command
Use service client factory method for resource group command
Python
mit
samedder/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli,samedder/azure-cli
from msrest import Serializer from ..commands import command, description from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = ResourceManagementClient( ResourceManagementClientConfiguration(*Profile().get_login_credentials())) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable Use service client factory method for resource group command
from msrest import Serializer from ..commands import command, description from ._command_creation import get_service_client from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable
<commit_before>from msrest import Serializer from ..commands import command, description from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = ResourceManagementClient( ResourceManagementClientConfiguration(*Profile().get_login_credentials())) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable <commit_msg>Use service client factory method for resource group command<commit_after>
from msrest import Serializer from ..commands import command, description from ._command_creation import get_service_client from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable
from msrest import Serializer from ..commands import command, description from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = ResourceManagementClient( ResourceManagementClientConfiguration(*Profile().get_login_credentials())) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable Use service client factory method for resource group commandfrom msrest import Serializer from ..commands import command, description from ._command_creation import get_service_client from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable
<commit_before>from msrest import Serializer from ..commands import command, description from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = ResourceManagementClient( ResourceManagementClientConfiguration(*Profile().get_login_credentials())) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable <commit_msg>Use service client factory method for resource group command<commit_after>from msrest import Serializer from ..commands import command, description from ._command_creation import get_service_client from .._profile import Profile @command('resource group list') @description('List resource groups') # TODO: waiting on Python Azure SDK bug fixes # @option('--tag-name -g <tagName>', L('the resource group's tag name')) # @option('--tag-value -g <tagValue>', L('the resource group's tag value')) # @option('--top -g <number>', L('Top N resource groups to retrieve')) def list_groups(args, unexpected): #pylint: disable=unused-argument from azure.mgmt.resource.resources import ResourceManagementClient, \ ResourceManagementClientConfiguration from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration) # TODO: waiting on Python Azure SDK bug fixes #group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value')) #groups = rmc.resource_groups.list(filter=None, top=args.get('top')) groups = rmc.resource_groups.list() serializable = Serializer().serialize_data(groups, "[ResourceGroup]") return serializable
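The commit above swaps direct client construction for a get_service_client helper imported from ._command_creation; that module is not part of this record, so the following is only a plausible sketch of such a factory, reusing the Profile().get_login_credentials() call pattern visible in the old code. The stub profile, its return values, and the stand-in SDK classes are all made up for illustration:

class _StubProfile(object):
    def get_login_credentials(self):
        return ("fake-credentials", "fake-subscription-id")  # placeholders

def get_service_client(client_class, config_class, profile=None):
    # Same shape as the call it replaces:
    # client_class(config_class(*Profile().get_login_credentials()))
    profile = profile or _StubProfile()
    return client_class(config_class(*profile.get_login_credentials()))

class _StubConfig(object):
    def __init__(self, *credentials):
        self.credentials = credentials

class _StubClient(object):
    def __init__(self, config):
        self.config = config

client = get_service_client(_StubClient, _StubConfig)
print(client.config.credentials)  # ('fake-credentials', 'fake-subscription-id')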
65d2a5f08ee96e80752362f7545167888599819e
website/addons/figshare/exceptions.py
website/addons/figshare/exceptions.py
from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name))
from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def can_delete(self): return True @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name))
Allow deletion of figshare drafts
Allow deletion of figshare drafts
Python
apache-2.0
zachjanicki/osf.io,DanielSBrown/osf.io,njantrania/osf.io,kushG/osf.io,erinspace/osf.io,GaryKriebel/osf.io,wearpants/osf.io,chrisseto/osf.io,samchrisinger/osf.io,caneruguz/osf.io,petermalcolm/osf.io,doublebits/osf.io,arpitar/osf.io,cldershem/osf.io,Nesiehr/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,mluo613/osf.io,pattisdr/osf.io,mluo613/osf.io,RomanZWang/osf.io,bdyetton/prettychart,KAsante95/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,dplorimer/osf,Nesiehr/osf.io,jinluyuan/osf.io,ZobairAlijan/osf.io,samanehsan/osf.io,danielneis/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,adlius/osf.io,himanshuo/osf.io,alexschiller/osf.io,jinluyuan/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,kch8qx/osf.io,erinspace/osf.io,zamattiac/osf.io,haoyuchen1992/osf.io,reinaH/osf.io,pattisdr/osf.io,alexschiller/osf.io,ckc6cz/osf.io,mluo613/osf.io,aaxelb/osf.io,njantrania/osf.io,TomBaxter/osf.io,mfraezz/osf.io,RomanZWang/osf.io,kwierman/osf.io,mluke93/osf.io,icereval/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,kushG/osf.io,KAsante95/osf.io,mfraezz/osf.io,chennan47/osf.io,acshi/osf.io,MerlinZhang/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,icereval/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,samchrisinger/osf.io,haoyuchen1992/osf.io,acshi/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,zachjanicki/osf.io,DanielSBrown/osf.io,lyndsysimon/osf.io,cwisecarver/osf.io,fabianvf/osf.io,asanfilippo7/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,lamdnhan/osf.io,HarryRybacki/osf.io,reinaH/osf.io,lyndsysimon/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,alexschiller/osf.io,cslzchen/osf.io,bdyetton/prettychart,hmoco/osf.io,caseyrollins/osf.io,billyhunt/osf.io,erinspace/osf.io,GaryKriebel/osf.io,MerlinZhang/osf.io,cldershem/osf.io,HarryRybacki/osf.io,binoculars/osf.io,revanthkolli/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,sbt9uc/osf.io,felliott/osf.io,doublebits/osf.io,rdhyee/osf.io,Ghalko/osf.io,billyhunt/osf.io,crcresearch/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,KAsante95/osf.io,KAsante95/osf.io,wearpants/osf.io,TomBaxter/osf.io,HarryRybacki/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,felliott/osf.io,Johnetordoff/osf.io,jeffreyliu3230/osf.io,petermalcolm/osf.io,danielneis/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,hmoco/osf.io,hmoco/osf.io,billyhunt/osf.io,alexschiller/osf.io,baylee-d/osf.io,aaxelb/osf.io,jeffreyliu3230/osf.io,zachjanicki/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,TomHeatwole/osf.io,mluke93/osf.io,kch8qx/osf.io,njantrania/osf.io,binoculars/osf.io,ckc6cz/osf.io,billyhunt/osf.io,chennan47/osf.io,GaryKriebel/osf.io,leb2dg/osf.io,cosenal/osf.io,TomBaxter/osf.io,cosenal/osf.io,barbour-em/osf.io,Ghalko/osf.io,SSJohns/osf.io,zkraime/osf.io,icereval/osf.io,TomHeatwole/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,cslzchen/osf.io,cwisecarver/osf.io,mattclark/osf.io,rdhyee/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,chrisseto/osf.io,chrisseto/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,barbour-em/osf.io,himanshuo/osf.io,wearpants/osf.io,RomanZWang/osf.io,barbour-em/osf.io,monikagrabowska/osf.io,MerlinZhang/osf.io,TomHeatwole/osf.io,zkraime/osf.io,SSJohns/osf.io,crcresearch/osf.io,jinluyuan/osf.io,sloria/osf.io,revanthkolli/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,arpitar/osf.io,petermalcolm/osf.io,hmoco/osf.io,GaryKriebel/osf.io,caseyrygt/osf.io,felliott/osf.io,bdyetton/
prettychart,ckc6cz/osf.io,danielneis/osf.io,binoculars/osf.io,ckc6cz/osf.io,samanehsan/osf.io,aaxelb/osf.io,jnayak1/osf.io,mluke93/osf.io,KAsante95/osf.io,arpitar/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,Ghalko/osf.io,doublebits/osf.io,fabianvf/osf.io,pattisdr/osf.io,jnayak1/osf.io,emetsger/osf.io,fabianvf/osf.io,sloria/osf.io,kch8qx/osf.io,laurenrevere/osf.io,jolene-esposito/osf.io,rdhyee/osf.io,revanthkolli/osf.io,jmcarp/osf.io,jolene-esposito/osf.io,lamdnhan/osf.io,jeffreyliu3230/osf.io,caseyrygt/osf.io,doublebits/osf.io,dplorimer/osf,abought/osf.io,danielneis/osf.io,emetsger/osf.io,sbt9uc/osf.io,adlius/osf.io,njantrania/osf.io,cslzchen/osf.io,cwisecarver/osf.io,mattclark/osf.io,HarryRybacki/osf.io,jolene-esposito/osf.io,barbour-em/osf.io,cosenal/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,jolene-esposito/osf.io,caseyrygt/osf.io,cosenal/osf.io,zkraime/osf.io,kwierman/osf.io,dplorimer/osf,himanshuo/osf.io,amyshi188/osf.io,lyndsysimon/osf.io,reinaH/osf.io,jmcarp/osf.io,TomHeatwole/osf.io,bdyetton/prettychart,jmcarp/osf.io,mattclark/osf.io,kushG/osf.io,abought/osf.io,acshi/osf.io,samchrisinger/osf.io,jmcarp/osf.io,ZobairAlijan/osf.io,lamdnhan/osf.io,jeffreyliu3230/osf.io,sbt9uc/osf.io,amyshi188/osf.io,lamdnhan/osf.io,cldershem/osf.io,reinaH/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,revanthkolli/osf.io,emetsger/osf.io,mluke93/osf.io,baylee-d/osf.io,leb2dg/osf.io,doublebits/osf.io,kushG/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,SSJohns/osf.io,mfraezz/osf.io,lyndsysimon/osf.io,felliott/osf.io,Johnetordoff/osf.io,MerlinZhang/osf.io,brandonPurvis/osf.io,himanshuo/osf.io,adlius/osf.io,GageGaskins/osf.io,petermalcolm/osf.io,brianjgeiger/osf.io,abought/osf.io,mluo613/osf.io,cslzchen/osf.io,dplorimer/osf,ticklemepierce/osf.io,Nesiehr/osf.io,zkraime/osf.io,leb2dg/osf.io,adlius/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,cldershem/osf.io,jinluyuan/osf.io,fabianvf/osf.io,abought/osf.io,caseyrygt/osf.io,arpitar/osf.io,chennan47/osf.io,zamattiac/osf.io,aaxelb/osf.io,emetsger/osf.io,acshi/osf.io,amyshi188/osf.io,GageGaskins/osf.io,caneruguz/osf.io,kwierman/osf.io,kch8qx/osf.io,sloria/osf.io,acshi/osf.io,kwierman/osf.io,RomanZWang/osf.io,caseyrollins/osf.io
from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name)) Allow deletion of figshare drafts
from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def can_delete(self): return True @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name))
<commit_before>from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name)) <commit_msg>Allow deletion of figshare drafts<commit_after>
from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def can_delete(self): return True @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name))
from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name)) Allow deletion of figshare draftsfrom website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def can_delete(self): return True @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name))
<commit_before>from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name)) <commit_msg>Allow deletion of figshare drafts<commit_after>from website.util.sanitize import escape_html from website.addons.base.exceptions import AddonEnrichmentError class FigshareIsDraftError(AddonEnrichmentError): def __init__(self, file_guid): self.file_guid = file_guid @property def can_delete(self): return True @property def renderable_error(self): return ''' <div class="alert alert-info" role="alert"> The file "{name}" is still a draft on Figshare. <br> To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on Figshare. </div> '''.format(name=escape_html(self.file_guid.name))
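The entire change in the record above is the new can_delete property. How callers consult it lies outside this record, so the sketch below assumes a base-class default of False (a guess, not the real AddonEnrichmentError contract) to show the kind of dispatch the flag enables:

class AddonEnrichmentError(Exception):
    @property
    def can_delete(self):
        return False  # assumed default; the real base class is not shown

class FigshareIsDraftError(AddonEnrichmentError):
    @property
    def can_delete(self):
        return True   # the behaviour the commit adds

def deletable(error):
    return error.can_delete

print(deletable(FigshareIsDraftError()))   # True
print(deletable(AddonEnrichmentError()))   # False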
e492ffe0e8d57917db6b937523abaabe0ee4ff8e
simplehist/__init__.py
simplehist/__init__.py
#__all__ = ["hists", "binning"] from .hists import * #from .converter import ashist
#__all__ = ["hists", "binning"] from .hists import * from .converter import ashist
Include ashist in the general namespace by default
Include ashist in the general namespace by default
Python
mit
ndevenish/simplehistogram
#__all__ = ["hists", "binning"] from .hists import * #from .converter import ashistInclude ashist in the general namespace by default
#__all__ = ["hists", "binning"] from .hists import * from .converter import ashist
<commit_before> #__all__ = ["hists", "binning"] from .hists import * #from .converter import ashist<commit_msg>Include ashist in the general namespace by default<commit_after>
#__all__ = ["hists", "binning"] from .hists import * from .converter import ashist
#__all__ = ["hists", "binning"] from .hists import * #from .converter import ashistInclude ashist in the general namespace by default #__all__ = ["hists", "binning"] from .hists import * from .converter import ashist
<commit_before> #__all__ = ["hists", "binning"] from .hists import * #from .converter import ashist<commit_msg>Include ashist in the general namespace by default<commit_after> #__all__ = ["hists", "binning"] from .hists import * from .converter import ashist
96726f66fb4ac69328e84877ead5adb6c2037e5e
site/cgi/csv-upload.py
site/cgi/csv-upload.py
#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import sys import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r\n Content-Type: text/html;charset=UTF-8\n <html> <body> <p>Error: No se subi&oacute; el archivo.</p> </body> </html> """
#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r Content-Type: text/html;charset=UTF-8\r <html>\r <body>\r <p>Error: No se subi&oacute; el archivo.</p>\r </body>\r </html>\r """
Remove unused modules, fix HTML response
Remove unused modules, fix HTML response
Python
agpl-3.0
alejosanchez/CSVBenford,alejosanchez/CSVBenford
#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import sys import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r\n Content-Type: text/html;charset=UTF-8\n <html> <body> <p>Error: No se subi&oacute; el archivo.</p> </body> </html> """ Remove unused modules, fix HTML response
#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r Content-Type: text/html;charset=UTF-8\r <html>\r <body>\r <p>Error: No se subi&oacute; el archivo.</p>\r </body>\r </html>\r """
<commit_before>#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import sys import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r\n Content-Type: text/html;charset=UTF-8\n <html> <body> <p>Error: No se subi&oacute; el archivo.</p> </body> </html> """ <commit_msg>Remove unused modules, fix HTML response<commit_after>
#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r Content-Type: text/html;charset=UTF-8\r <html>\r <body>\r <p>Error: No se subi&oacute; el archivo.</p>\r </body>\r </html>\r """
#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import sys import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r\n Content-Type: text/html;charset=UTF-8\n <html> <body> <p>Error: No se subi&oacute; el archivo.</p> </body> </html> """ Remove unused modules, fix HTML response#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r Content-Type: text/html;charset=UTF-8\r <html>\r <body>\r <p>Error: No se subi&oacute; el archivo.</p>\r </body>\r </html>\r """
<commit_before>#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import sys import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r\n Content-Type: text/html;charset=UTF-8\n <html> <body> <p>Error: No se subi&oacute; el archivo.</p> </body> </html> """ <commit_msg>Remove unused modules, fix HTML response<commit_after>#!/usr/bin/python # Based on examples from # http://www.tutorialspoint.com/python/python_cgi_programming.htm import cgi import os import cgitb cgitb.enable() CSV_DIR = '../csv/' # CSV upload directory form = cgi.FieldStorage() fileitem = form['filename'] # Get filename # Check if the file was uploaded if fileitem.filename: # strip leading path from file name to avoid # directory traversal attacks fn = os.path.basename(fileitem.filename) open(CSV_DIR + fn, 'wb').write(fileitem.file.read()) print 'Status: 204\r\n\r\n' # Success, don't reload page else: # Error, send a message print """\ Status: 500\r Content-Type: text/html;charset=UTF-8\r <html>\r <body>\r <p>Error: No se subi&oacute; el archivo.</p>\r </body>\r </html>\r """
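The upload handler above leans on os.path.basename to strip attacker-supplied directory components before writing into CSV_DIR. A quick demonstration of what that buys (valid for POSIX-style paths; Windows-style backslash separators would need extra handling):

import os.path

for hostile in ("../../etc/passwd", "/etc/passwd", "report.csv"):
    print(hostile, "->", os.path.basename(hostile))

# ../../etc/passwd -> passwd
# /etc/passwd -> passwd
# report.csv -> report.csv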
041d7285b2ec5c6f323c10a4bbfd388c9d1cb216
skyscanner/__init__.py
skyscanner/__init__.py
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from skyscanner import Flights, FlightsCache, Hotels, CarHire
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from .skyscanner import Flights, FlightsCache, Hotels, CarHire
Make it work with Python 3.
Make it work with Python 3.
Python
apache-2.0
valery-barysok/skyscanner-python-sdk,Skyscanner/skyscanner-python-sdk,joesarre/skyscanner-python-sdk
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from skyscanner import Flights, FlightsCache, Hotels, CarHireMake it work with Python 3.
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from .skyscanner import Flights, FlightsCache, Hotels, CarHire
<commit_before># -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from skyscanner import Flights, FlightsCache, Hotels, CarHire<commit_msg>Make it work with Python 3.<commit_after>
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from .skyscanner import Flights, FlightsCache, Hotels, CarHire
# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from skyscanner import Flights, FlightsCache, Hotels, CarHireMake it work with Python 3.# -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from .skyscanner import Flights, FlightsCache, Hotels, CarHire
<commit_before># -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from skyscanner import Flights, FlightsCache, Hotels, CarHire<commit_msg>Make it work with Python 3.<commit_after># -*- coding: utf-8 -*- __author__ = 'Ardy Dedase' __email__ = 'ardy.dedase@skyscanner.net' __version__ = '0.1.0' from .skyscanner import Flights, FlightsCache, Hotels, CarHire
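The one-character fix above works because Python 3 removed implicit relative imports (PEP 328): inside a package, "from skyscanner import X" now resolves against sys.path rather than the sibling skyscanner.py, so the explicit ".skyscanner" form is required. A throwaway package built at runtime shows the explicit form working; all names here are invented:

import os
import sys
import tempfile

root = tempfile.mkdtemp()
pkg_dir = os.path.join(root, "pkg")
os.makedirs(pkg_dir)
with open(os.path.join(pkg_dir, "mod.py"), "w") as f:
    f.write("def greet():\n    return 'hi'\n")
with open(os.path.join(pkg_dir, "__init__.py"), "w") as f:
    f.write("from .mod import greet\n")  # explicit relative import, as in the fix

sys.path.insert(0, root)
import pkg
print(pkg.greet())  # hi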
cb31dcc7be5e89c865686d9a2a07e8a64c9c0179
gamernews/apps/threadedcomments/views.py
gamernews/apps/threadedcomments/views.py
from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, feeditem_id = request.GET['c'].split( ':' ) feeditem = FeedItem.objects.get( pk=feeditem_id ) if post: return HttpResponseRedirect( feeditem.get_absolute_url() ) return HttpResponseRedirect( "/" )
from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from news.models import Blob, BlobInstance from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, blob_id = request.GET['c'].split( ':' ) blob = Blob.objects.get( pk=blob_id ) if post: return HttpResponseRedirect( blob.get_absolute_url() ) return HttpResponseRedirect( "/" )
Remove name, url and email from comment form
Remove name, url and email from comment form
Python
mit
underlost/GamerNews,underlost/GamerNews
from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, feeditem_id = request.GET['c'].split( ':' ) feeditem = FeedItem.objects.get( pk=feeditem_id ) if post: return HttpResponseRedirect( feeditem.get_absolute_url() ) return HttpResponseRedirect( "/" ) Remove name, url and email from comment form
from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from news.models import Blob, BlobInstance from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, blob_id = request.GET['c'].split( ':' ) blob = Blob.objects.get( pk=blob_id ) if post: return HttpResponseRedirect( blob.get_absolute_url() ) return HttpResponseRedirect( "/" )
<commit_before>from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, feeditem_id = request.GET['c'].split( ':' ) feeditem = FeedItem.objects.get( pk=feeditem_id ) if post: return HttpResponseRedirect( feeditem.get_absolute_url() ) return HttpResponseRedirect( "/" ) <commit_msg>Remove name, url and email from comment form<commit_after>
from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from news.models import Blob, BlobInstance from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, blob_id = request.GET['c'].split( ':' ) blob = Blob.objects.get( pk=blob_id ) if post: return HttpResponseRedirect( blob.get_absolute_url() ) return HttpResponseRedirect( "/" )
from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, feeditem_id = request.GET['c'].split( ':' ) feeditem = FeedItem.objects.get( pk=feeditem_id ) if post: return HttpResponseRedirect( feeditem.get_absolute_url() ) return HttpResponseRedirect( "/" ) Remove name, url and email from comment formfrom django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from news.models import Blob, BlobInstance from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, blob_id = request.GET['c'].split( ':' ) blob = Blob.objects.get( pk=blob_id ) if post: return HttpResponseRedirect( blob.get_absolute_url() ) return HttpResponseRedirect( "/" )
<commit_before>from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, feeditem_id = request.GET['c'].split( ':' ) feeditem = FeedItem.objects.get( pk=feeditem_id ) if post: return HttpResponseRedirect( feeditem.get_absolute_url() ) return HttpResponseRedirect( "/" ) <commit_msg>Remove name, url and email from comment form<commit_after>from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext as _ from django.views.generic.list import ListView from core.models import Account as User from django_comments.models import Comment from news.models import Blob, BlobInstance from .models import ThreadedComment def single_comment(request, id): comment = get_object_or_404(ThreadedComment, id=id) variables = RequestContext(request, {'comment': comment}) return render_to_response('comments/single.html', variables) def comment_posted( request ): if request.GET['c']: comment_id, blob_id = request.GET['c'].split( ':' ) blob = Blob.objects.get( pk=blob_id ) if post: return HttpResponseRedirect( blob.get_absolute_url() ) return HttpResponseRedirect( "/" )
5fd5d32a04615656af03e0c1fa71ca7b81a72b1f
conda_env/installers/conda.py
conda_env/installers/conda.py
from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap() actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json)
from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap( channel_urls=data.get('channels', ()) ) actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json)
Add support for channels in the spec
Add support for channels in the spec
Python
bsd-3-clause
conda/conda-env,ESSS/conda-env,phobson/conda-env,asmeurer/conda-env,nicoddemus/conda-env,mikecroucher/conda-env,conda/conda-env,dan-blanchard/conda-env,isaac-kit/conda-env,isaac-kit/conda-env,mikecroucher/conda-env,ESSS/conda-env,nicoddemus/conda-env,phobson/conda-env,dan-blanchard/conda-env,asmeurer/conda-env
from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap() actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) Add support for channels in the spec
from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap( channel_urls=data.get('channels', ()) ) actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json)
<commit_before>from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap() actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) <commit_msg>Add support for channels in the spec<commit_after>
from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap( channel_urls=data.get('channels', ()) ) actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json)
from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap() actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) Add support for channels in the specfrom __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap( channel_urls=data.get('channels', ()) ) actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json)
<commit_before>from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap() actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json) <commit_msg>Add support for channels in the spec<commit_after>from __future__ import absolute_import import sys from conda.cli import common from conda import plan def install(prefix, specs, args, data): # TODO: do we need this? common.check_specs(prefix, specs, json=args.json) # TODO: support all various ways this happens index = common.get_index_trap( channel_urls=data.get('channels', ()) ) actions = plan.install_actions(prefix, index, specs) if plan.nothing_to_do(actions): sys.stderr.write('# TODO handle more gracefully') sys.exit(-1) with common.json_progress_bars(json=args.json and not args.quiet): try: plan.execute_actions(actions, index, verbose=not args.quiet) except RuntimeError as e: if len(e.args) > 0 and "LOCKERROR" in e.args[0]: error_type = "AlreadyLocked" else: error_type = "RuntimeError" common.exception_and_exit(e, error_type=error_type, json=args.json) except SystemExit as e: common.exception_and_exit(e, json=args.json)
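The new keyword argument above reads channels out of the parsed environment spec via data.get('channels', ()). The spec parsing happens elsewhere in conda-env, so the dict below is only an assumed shape consistent with that call; the field names besides "channels" are hypothetical:

data = {
    "name": "example-env",                    # hypothetical spec fields
    "channels": ["defaults", "conda-forge"],
    "dependencies": ["numpy", "pandas"],
}

channel_urls = data.get("channels", ())
print(channel_urls)  # ['defaults', 'conda-forge']

# A spec without channels falls back to the empty tuple default:
print({}.get("channels", ()))  # ()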
08da4c91853b14a264ea07fa5314be437fbce9d4
src/gui/graphscheme.py
src/gui/graphscheme.py
# -*- coding: utf-8 -*- from PySide import QtGui class GraphScheme( QtGui.QGraphicsScene): """ Graph scheme drawing class This class inherits from QtGui.QGraphicsScene and add functions for manage GraphBlocks objects in scheme. """ def __init__(self ): super(GraphScheme, self).__init__() self.layout = QtGui.QGraphicsGridLayout() self.form = QtGui.QGraphicsWidget() self.form.setLayout(self.layout) self.addItem(self.form) self.form.setPos(0, 0) def add_block(self, block, row, column): """ Add GraphBlock to scheme into specified row and column """ self.layout.addItem(block, row, column)
# -*- coding: utf-8 -*- from PySide import QtGui, QtCore class GraphScheme( QtGui.QGraphicsScene): """ Graph scheme drawing class This class inherits from QtGui.QGraphicsScene and add functions for manage GraphBlocks objects in scheme. """ def __init__(self ): super(GraphScheme, self).__init__() self.layout = QtGui.QGraphicsGridLayout() self.form = QtGui.QGraphicsWidget() self.form.setLayout(self.layout) self.addItem(self.form) self.form.setPos(0, 0) gradient = QtGui.QLinearGradient(0, 0, 0, 4000) gradient.setColorAt( 0, QtGui.QColor(255, 255, 255)) gradient.setColorAt( 1, QtGui.QColor(0, 0, 255)) self.setBackgroundBrush(gradient) def add_block(self, block, row, column): """ Add GraphBlock to scheme into specified row and column """ self.layout.addItem(block, row, column)
Add gradient background to GraphScheme
Add gradient background to GraphScheme
Python
lgpl-2.1
anton-golubkov/Garland,anton-golubkov/Garland
# -*- coding: utf-8 -*-

from PySide import QtGui


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
Add gradient background to GraphScheme
# -*- coding: utf-8 -*-

from PySide import QtGui, QtCore


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)
        gradient = QtGui.QLinearGradient(0, 0, 0, 4000)
        gradient.setColorAt(0, QtGui.QColor(255, 255, 255))
        gradient.setColorAt(1, QtGui.QColor(0, 0, 255))
        self.setBackgroundBrush(gradient)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
<commit_before># -*- coding: utf-8 -*-

from PySide import QtGui


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
<commit_msg>Add gradient background to GraphScheme<commit_after>
# -*- coding: utf-8 -*-

from PySide import QtGui, QtCore


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)
        gradient = QtGui.QLinearGradient(0, 0, 0, 4000)
        gradient.setColorAt(0, QtGui.QColor(255, 255, 255))
        gradient.setColorAt(1, QtGui.QColor(0, 0, 255))
        self.setBackgroundBrush(gradient)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
# -*- coding: utf-8 -*-

from PySide import QtGui


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
Add gradient background to GraphScheme
# -*- coding: utf-8 -*-

from PySide import QtGui, QtCore


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)
        gradient = QtGui.QLinearGradient(0, 0, 0, 4000)
        gradient.setColorAt(0, QtGui.QColor(255, 255, 255))
        gradient.setColorAt(1, QtGui.QColor(0, 0, 255))
        self.setBackgroundBrush(gradient)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
<commit_before># -*- coding: utf-8 -*-

from PySide import QtGui


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
<commit_msg>Add gradient background to GraphScheme<commit_after># -*- coding: utf-8 -*-

from PySide import QtGui, QtCore


class GraphScheme(QtGui.QGraphicsScene):
    """ Graph scheme drawing class

        This class inherits from QtGui.QGraphicsScene
        and adds functions for managing GraphBlock objects
        in the scheme.
    """

    def __init__(self):
        super(GraphScheme, self).__init__()
        self.layout = QtGui.QGraphicsGridLayout()
        self.form = QtGui.QGraphicsWidget()
        self.form.setLayout(self.layout)
        self.addItem(self.form)
        self.form.setPos(0, 0)
        gradient = QtGui.QLinearGradient(0, 0, 0, 4000)
        gradient.setColorAt(0, QtGui.QColor(255, 255, 255))
        gradient.setColorAt(1, QtGui.QColor(0, 0, 255))
        self.setBackgroundBrush(gradient)

    def add_block(self, block, row, column):
        """ Add a GraphBlock to the scheme at the specified row and column
        """
        self.layout.addItem(block, row, column)
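For readers unfamiliar with Qt gradients, here is a self-contained sketch of the same background technique applied to a bare scene, assuming PySide (Qt4) is installed; the geometry and colors mirror the commit above:

import sys
from PySide import QtGui

app = QtGui.QApplication(sys.argv)
scene = QtGui.QGraphicsScene()
# Vertical gradient axis: white at y=0 fading to blue at y=4000.
gradient = QtGui.QLinearGradient(0, 0, 0, 4000)
gradient.setColorAt(0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1, QtGui.QColor(0, 0, 255))
scene.setBackgroundBrush(gradient)

view = QtGui.QGraphicsView(scene)
view.show()
sys.exit(app.exec_())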
c5742bb27aa8446cb5b4c491df6be9c733a1408f
unitary/examples/tictactoe/enums.py
unitary/examples/tictactoe/enums.py
# Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4
# Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 class TicTacRules(enum.Enum): CLASSICAL = 0 MINIMAL_QUANTUM = 1 FULLY_QUANTUM = 2
Add enum for different rulesets
Add enum for different rulesets
Python
apache-2.0
quantumlib/unitary,quantumlib/unitary
# Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 Add enum for different rulesets
# Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 class TicTacRules(enum.Enum): CLASSICAL = 0 MINIMAL_QUANTUM = 1 FULLY_QUANTUM = 2
<commit_before># Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 <commit_msg>Add enum for different rulesets<commit_after>
# Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 class TicTacRules(enum.Enum): CLASSICAL = 0 MINIMAL_QUANTUM = 1 FULLY_QUANTUM = 2
# Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 Add enum for different rulesets# Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 class TicTacRules(enum.Enum): CLASSICAL = 0 MINIMAL_QUANTUM = 1 FULLY_QUANTUM = 2
<commit_before># Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 <commit_msg>Add enum for different rulesets<commit_after># Copyright 2022 Google # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import enum class TicTacSquare(enum.Enum): EMPTY = 0 X = 1 O = 2 class TicTacResult(enum.Enum): UNFINISHED = 0 X_WINS = 1 O_WINS = 2 DRAW = 3 BOTH_WIN = 4 class TicTacRules(enum.Enum): CLASSICAL = 0 MINIMAL_QUANTUM = 1 FULLY_QUANTUM = 2
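The new TicTacRules enum only declares the rulesets; a plausible use is gating which move types are legal. The helper below is purely illustrative and not part of the repository:

import enum

class TicTacRules(enum.Enum):
    CLASSICAL = 0
    MINIMAL_QUANTUM = 1
    FULLY_QUANTUM = 2

def split_moves_allowed(rules):
    # Hypothetical rule check: quantum "split" moves are illegal only
    # under the classical ruleset.
    return rules != TicTacRules.CLASSICAL

assert not split_moves_allowed(TicTacRules.CLASSICAL)
assert split_moves_allowed(TicTacRules.FULLY_QUANTUM)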
1dfb9fd979206af415e20fc58b905c435a187008
app/soc/models/grading_project_survey.py
app/soc/models/grading_project_survey.py
#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'mentor'
#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'org'
Set default taking access for GradingProjectSurvey to org.
Set default taking access for GradingProjectSurvey to org. This will allow Mentors and Org Admins to take GradingProjectSurveys in case that an Org Admin has no Mentor roles.
Python
apache-2.0
MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging
#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'mentor' Set default taking access for GradingProjectSurvey to org. This will allow Mentors and Org Admins to take GradingProjectSurveys in case that an Org Admin has no Mentor roles.
#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'org'
<commit_before>#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'mentor' <commit_msg>Set default taking access for GradingProjectSurvey to org. This will allow Mentors and Org Admins to take GradingProjectSurveys in case that an Org Admin has no Mentor roles.<commit_after>
#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'org'
#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'mentor' Set default taking access for GradingProjectSurvey to org. This will allow Mentors and Org Admins to take GradingProjectSurveys in case that an Org Admin has no Mentor roles.#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'org'
<commit_before>#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'mentor' <commit_msg>Set default taking access for GradingProjectSurvey to org. This will allow Mentors and Org Admins to take GradingProjectSurveys in case that an Org Admin has no Mentor roles.<commit_after>#!/usr/bin/python2.5 # # Copyright 2009 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the GradingProjectSurvey model. """ __authors__ = [ '"Daniel Diniz" <ajaksu@gmail.com>', '"Lennard de Rijk" <ljvderijk@gmail.com>', ] from soc.models.project_survey import ProjectSurvey class GradingProjectSurvey(ProjectSurvey): """Survey for Mentors for each of their StudentProjects. """ def __init__(self, *args, **kwargs): super(GradingProjectSurvey, self).__init__(*args, **kwargs) self.taking_access = 'org'
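The commit relies on a common pattern: call the parent initializer, then overwrite one inherited default. A stripped-down sketch (class names mirror the sample, but the base-class body and its 'student' default are invented for illustration):

class ProjectSurvey(object):
    def __init__(self, *args, **kwargs):
        self.taking_access = 'student'   # assumed base default, for illustration

class GradingProjectSurvey(ProjectSurvey):
    def __init__(self, *args, **kwargs):
        super(GradingProjectSurvey, self).__init__(*args, **kwargs)
        # Widened from 'mentor' to 'org' so org admins without a mentor
        # role can still take the survey.
        self.taking_access = 'org'

print(GradingProjectSurvey().taking_access)  # org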
ee37119a4f77eef5c8163936d982e178c42cbc00
src/adhocracy/lib/machine_name.py
src/adhocracy/lib/machine_name.py
import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): headers.append(('X-Server-Machine', platform.node())) start_response(status, headers, exc_info) return self.app(environ, local_response)
import os import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): machine_id = '%s:%s (PID %d)' % ( platform.node(), environ.get('SERVER_PORT'), os.getpid()) headers.append(('X-Server-Machine', machine_id)) start_response(status, headers, exc_info) return self.app(environ, local_response)
Add Server Port and PID to the X-Server-Machine header
Add Server Port and PID to the X-Server-Machine header Fixes hhucn/adhocracy.hhu_theme#429
Python
agpl-3.0
liqd/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,liqd/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,phihag/adhocracy,liqd/adhocracy,phihag/adhocracy,phihag/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy
import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): headers.append(('X-Server-Machine', platform.node())) start_response(status, headers, exc_info) return self.app(environ, local_response) Add Server Port and PID to the X-Server-Machine header Fixes hhucn/adhocracy.hhu_theme#429
import os import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): machine_id = '%s:%s (PID %d)' % ( platform.node(), environ.get('SERVER_PORT'), os.getpid()) headers.append(('X-Server-Machine', machine_id)) start_response(status, headers, exc_info) return self.app(environ, local_response)
<commit_before> import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): headers.append(('X-Server-Machine', platform.node())) start_response(status, headers, exc_info) return self.app(environ, local_response) <commit_msg>Add Server Port and PID to the X-Server-Machine header Fixes hhucn/adhocracy.hhu_theme#429<commit_after>
import os import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): machine_id = '%s:%s (PID %d)' % ( platform.node(), environ.get('SERVER_PORT'), os.getpid()) headers.append(('X-Server-Machine', machine_id)) start_response(status, headers, exc_info) return self.app(environ, local_response)
import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): headers.append(('X-Server-Machine', platform.node())) start_response(status, headers, exc_info) return self.app(environ, local_response) Add Server Port and PID to the X-Server-Machine header Fixes hhucn/adhocracy.hhu_theme#429 import os import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): machine_id = '%s:%s (PID %d)' % ( platform.node(), environ.get('SERVER_PORT'), os.getpid()) headers.append(('X-Server-Machine', machine_id)) start_response(status, headers, exc_info) return self.app(environ, local_response)
<commit_before> import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): headers.append(('X-Server-Machine', platform.node())) start_response(status, headers, exc_info) return self.app(environ, local_response) <commit_msg>Add Server Port and PID to the X-Server-Machine header Fixes hhucn/adhocracy.hhu_theme#429<commit_after> import os import platform class IncludeMachineName(object): def __init__(self, app, config): self.app = app self.config = config def __call__(self, environ, start_response): def local_response(status, headers, exc_info=None): machine_id = '%s:%s (PID %d)' % ( platform.node(), environ.get('SERVER_PORT'), os.getpid()) headers.append(('X-Server-Machine', machine_id)) start_response(status, headers, exc_info) return self.app(environ, local_response)
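Because IncludeMachineName is plain WSGI middleware, it can be exercised without a real server. A runnable sketch, assuming the class above is importable; the port and stub app are made up:

import os
import platform

def stub_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

def capture_start_response(status, headers, exc_info=None):
    # Prints something like: myhost:8080 (PID 12345)
    print(dict(headers)['X-Server-Machine'])

wrapped = IncludeMachineName(stub_app, config={})
wrapped({'SERVER_PORT': '8080'}, capture_start_response)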
60c4534b1d375aecfe39948e27bc06d0f34907f3
bioagents/resources/trips_ont_manager.py
bioagents/resources/trips_ont_manager.py
import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa
import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False, build_closure=True) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa
Build closure for TRIPS ontology
Build closure for TRIPS ontology
Python
bsd-2-clause
sorgerlab/bioagents,bgyori/bioagents
import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa Build closure for TRIPS ontology
import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False, build_closure=True) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa
<commit_before>import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa <commit_msg>Build closure for TRIPS ontology<commit_after>
import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False, build_closure=True) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa
import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa Build closure for TRIPS ontologyimport os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False, build_closure=True) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa
<commit_before>import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa <commit_msg>Build closure for TRIPS ontology<commit_after>import os from indra.preassembler.hierarchy_manager import HierarchyManager # Make a TRIPS ontology _fname = os.path.join(os.path.dirname(__file__), 'trips_ontology.rdf') trips_ontology = HierarchyManager(_fname, uri_as_name=False, build_closure=True) trips_ontology.relations_prefix = 'http://trips.ihmc.us/relations/' trips_ontology.initialize() def trips_isa(concept1, concept2): # Preprocess to make this more general concept1 = concept1.lower().replace('ont::', '') concept2 = concept2.lower().replace('ont::', '') if concept1 == concept2: return True isa = trips_ontology.isa('http://trips.ihmc.us/concepts/', concept1, 'http://trips.ihmc.us/concepts/', concept2) return isa
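Independently of INDRA's HierarchyManager API, the payoff of build_closure=True is the usual one: precompute the transitive closure once so repeated isa() queries become set lookups instead of graph walks. A generic sketch with made-up concept names:

# Toy single-parent hierarchy; a real ontology is an RDF graph.
parents = {'kinase': 'enzyme', 'enzyme': 'protein', 'protein': 'molecule'}

# One-time closure pass: map each node to all of its ancestors.
closure = {}
for node in parents:
    ancestors, cur = set(), node
    while cur in parents:
        cur = parents[cur]
        ancestors.add(cur)
    closure[node] = ancestors

def isa(child, ancestor):
    # Constant-time membership test after the closure is built.
    return child == ancestor or ancestor in closure.get(child, set())

assert isa('kinase', 'molecule')
assert not isa('molecule', 'kinase')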
89e2991109447893b06edf363f223c64e9cafb61
query_result_list.py
query_result_list.py
from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query, result_documents = []): self.result_documents = result_documents # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)]
from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query): self.result_documents = [] # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] def length( self ): return len( self.result_documents )
Fix query result list having an empty list default parameter (default parameters get initialized only once!)
Fix query result list having an empty list default parameter (default parameters get initialized only once!)
Python
mit
fire-uta/iiix-data-parser
from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query, result_documents = []): self.result_documents = result_documents # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] Fix query result list having an empty list default parameter (default parameters get initialized only once!)
from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query): self.result_documents = [] # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] def length( self ): return len( self.result_documents )
<commit_before>from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query, result_documents = []): self.result_documents = result_documents # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] <commit_msg>Fix query result list having an empty list default parameter (default parameters get initialized only once!)<commit_after>
from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query): self.result_documents = [] # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] def length( self ): return len( self.result_documents )
from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query, result_documents = []): self.result_documents = result_documents # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] Fix query result list having an empty list default parameter (default parameters get initialized only once!)from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query): self.result_documents = [] # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] def length( self ): return len( self.result_documents )
<commit_before>from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query, result_documents = []): self.result_documents = result_documents # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] <commit_msg>Fix query result list having an empty list default parameter (default parameters get initialized only once!)<commit_after>from query_result_document import QueryResultDocument class QueryResultList: def __init__(self, query): self.result_documents = [] # Guaranteed to be in rank order self.query = query def add( self, rank, document ): self.result_documents.insert( int(rank) - 1, QueryResultDocument( self, rank, document ) ) def results_up_to_rank( self, rank ): return self.result_documents[:int(rank)] def length( self ): return len( self.result_documents )
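The bug class fixed here is Python's classic mutable-default-argument trap: the default list is created once, when the function (or __init__) is defined, and then shared by every call that relies on the default. A standalone demonstration of both the bug and the conventional fix:

def append_to(item, bucket=[]):          # buggy: one list shared by all calls
    bucket.append(item)
    return bucket

print(append_to(1))   # [1]
print(append_to(2))   # [1, 2] -- state leaked from the first call

def append_to_fixed(item, bucket=None):
    if bucket is None:
        bucket = []                      # fresh list on every call
    bucket.append(item)
    return bucket

print(append_to_fixed(1))   # [1]
print(append_to_fixed(2))   # [2]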
74a944c47432f707bb8cf6cde421e4927eeeaebb
lib/cretonne/meta/isa/riscv/__init__.py
lib/cretonne/meta/isa/riscv/__init__.py
"""
RISC-V Target
-------------

`RISC-V <http://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
"""
RISC-V Target
-------------

`RISC-V <https://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
Use an https URL rather than http.
Use an https URL rather than http. Found by sphinx's linkcheck.
Python
apache-2.0
sunfishcode/cretonne,stoklund/cretonne,stoklund/cretonne,stoklund/cretonne,sunfishcode/cretonne,sunfishcode/cretonne
"""
RISC-V Target
-------------

`RISC-V <http://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
Use an https URL rather than http.

Found by sphinx's linkcheck.
"""
RISC-V Target
-------------

`RISC-V <https://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
<commit_before>"""
RISC-V Target
-------------

`RISC-V <http://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
<commit_msg>Use an https URL rather than http. Found by sphinx's linkcheck.<commit_after>
"""
RISC-V Target
-------------

`RISC-V <https://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
"""
RISC-V Target
-------------

`RISC-V <http://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
Use an https URL rather than http.

Found by sphinx's linkcheck.
"""
RISC-V Target
-------------

`RISC-V <https://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
<commit_before>"""
RISC-V Target
-------------

`RISC-V <http://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
<commit_msg>Use an https URL rather than http. Found by sphinx's linkcheck.<commit_after>"""
RISC-V Target
-------------

`RISC-V <https://riscv.org/>`_ is an open instruction set architecture
originally developed at UC Berkeley. It is a RISC-style ISA with either a
32-bit (RV32I) or 64-bit (RV64I) base instruction set and a number of
optional extensions:

RV32M / RV64M
    Integer multiplication and division.

RV32A / RV64A
    Atomics.

RV32F / RV64F
    Single-precision IEEE floating point.

RV32D / RV64D
    Double-precision IEEE floating point.

RV32G / RV64G
    General purpose instruction sets. This represents the union of the I, M,
    A, F, and D instruction sets listed above.
"""

from __future__ import absolute_import
from . import defs
from . import encodings, settings, registers  # noqa

# Re-export the primary target ISA definition.
ISA = defs.ISA.finish()
660e38254788f4eae8bb970d4949e93739374383
src/stratisd_client_dbus/_connection.py
src/stratisd_client_dbus/_connection.py
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SessionBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False)
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SystemBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False)
Change to connect to Stratisd on the system bus
Change to connect to Stratisd on the system bus Stratisd is changing to use the system bus, so naturally we need to also change in order to continue working. Signed-off-by: Andy Grover <b7d524d2f5cc5aebadb6b92b08d3ab26911cde33@redhat.com>
Python
mpl-2.0
trgill/stratisd,trgill/stratisd,stratis-storage/stratisd,stratis-storage/stratisd-client-dbus,mulkieran/stratisd,stratis-storage/stratisd,stratis-storage/stratisd,mulkieran/stratisd
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SessionBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False) Change to connect to Stratisd on the system bus Stratisd is changing to use the system bus, so naturally we need to also change in order to continue working. Signed-off-by: Andy Grover <b7d524d2f5cc5aebadb6b92b08d3ab26911cde33@redhat.com>
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SystemBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False)
<commit_before># Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SessionBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False) <commit_msg>Change to connect to Stratisd on the system bus Stratisd is changing to use the system bus, so naturally we need to also change in order to continue working. Signed-off-by: Andy Grover <b7d524d2f5cc5aebadb6b92b08d3ab26911cde33@redhat.com><commit_after>
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SystemBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False)
# Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SessionBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False) Change to connect to Stratisd on the system bus Stratisd is changing to use the system bus, so naturally we need to also change in order to continue working. Signed-off-by: Andy Grover <b7d524d2f5cc5aebadb6b92b08d3ab26911cde33@redhat.com># Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SystemBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False)
<commit_before># Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SessionBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False) <commit_msg>Change to connect to Stratisd on the system bus Stratisd is changing to use the system bus, so naturally we need to also change in order to continue working. Signed-off-by: Andy Grover <b7d524d2f5cc5aebadb6b92b08d3ab26911cde33@redhat.com><commit_after># Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Miscellaneous helpful methods. """ import dbus from ._constants import SERVICE class Bus(object): """ Our bus. """ # pylint: disable=too-few-public-methods _BUS = None @staticmethod def get_bus(): # pragma: no cover """ Get our bus. """ if Bus._BUS is None: Bus._BUS = dbus.SystemBus() return Bus._BUS def get_object(object_path): # pragma: no cover """ Get an object from an object path. :param str object_path: an object path with a valid format :returns: the proxy object corresponding to the object path :rtype: ProxyObject """ return Bus.get_bus().get_object(SERVICE, object_path, introspect=False)
015bc46057db405107799d7214b0fe5264843277
run_deploy_job_wr.py
run_deploy_job_wr.py
#!/usr/bin/env python import json import os import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': ['*']}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, ]) if __name__ == '__main__': main()
#!/usr/bin/env python import json import os from os.path import join import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': [ 'artifacts/machine*/*log*', 'artifacts/*.jenv', ]}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, '-v', ]) if __name__ == '__main__': main()
Fix artifact spec for deploy-job.
Fix artifact spec for deploy-job.
Python
agpl-3.0
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
#!/usr/bin/env python import json import os import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': ['*']}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, ]) if __name__ == '__main__': main() Fix artifact spec for deploy-job.
#!/usr/bin/env python import json import os from os.path import join import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': [ 'artifacts/machine*/*log*', 'artifacts/*.jenv', ]}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, '-v', ]) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python import json import os import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': ['*']}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, ]) if __name__ == '__main__': main() <commit_msg>Fix artifact spec for deploy-job.<commit_after>
#!/usr/bin/env python import json import os from os.path import join import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': [ 'artifacts/machine*/*log*', 'artifacts/*.jenv', ]}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, '-v', ]) if __name__ == '__main__': main()
#!/usr/bin/env python import json import os import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': ['*']}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, ]) if __name__ == '__main__': main() Fix artifact spec for deploy-job.#!/usr/bin/env python import json import os from os.path import join import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': [ 'artifacts/machine*/*log*', 'artifacts/*.jenv', ]}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, '-v', ]) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python import json import os import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': ['*']}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, ]) if __name__ == '__main__': main() <commit_msg>Fix artifact spec for deploy-job.<commit_after>#!/usr/bin/env python import json import os from os.path import join import subprocess import sys from tempfile import NamedTemporaryFile def main(): revision_build = os.environ['revision_build'] job_name = os.environ['JOB_NAME'] build_number = os.environ['BUILD_NUMBER'] prefix='juju-ci/products/version-{}/{}/build-{}'.format( revision_build, job_name, build_number) s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg') command = [ '$HOME/juju-ci-tools/run-deploy-job-remote.bash', revision_build, job_name, ] command.extend(sys.argv[2:]) with NamedTemporaryFile() as config_file: json.dump({ 'command': command, 'install': {}, 'artifacts': {'artifacts': [ 'artifacts/machine*/*log*', 'artifacts/*.jenv', ]}, 'bucket': 'juju-qa-data', }, config_file) config_file.flush() subprocess.check_call([ 'workspace-run', config_file.name, sys.argv[1], prefix, '--s3-config', s3_config, '-v', ]) if __name__ == '__main__': main()
78f8634ac7ae959cfc7f34188ce4f56156922dcb
pkgpanda/integration-tests/test_fetch.py
pkgpanda/integration-tests/test_fetch.py
import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case.
import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. def test_add(tmpdir): assert run([ "pkgpanda", "add", "{0}/../tests/resources/remote_repo/packages/mesos/mesos--0.22.0.tar.xz".format(os.getcwd()), "--repository={0}".format(tmpdir), ]) == "" # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(branden): Test unable to add case.
Add integration test for `pkgpanda add`
Add integration test for `pkgpanda add`
Python
apache-2.0
mesosphere-mergebot/dcos,vishnu2kmohan/dcos,xinxian0458/dcos,GoelDeepak/dcos,jeid64/dcos,mesosphere-mergebot/dcos,kensipe/dcos,lingmann/dcos,surdy/dcos,mesosphere-mergebot/mergebot-test-dcos,amitaekbote/dcos,amitaekbote/dcos,mnaboka/dcos,surdy/dcos,lingmann/dcos,amitaekbote/dcos,vishnu2kmohan/dcos,jeid64/dcos,mesosphere-mergebot/dcos,dcos/dcos,BenWhitehead/dcos,branden/dcos,dcos/dcos,jeid64/dcos,GoelDeepak/dcos,darkonie/dcos,lingmann/dcos,mesosphere-mergebot/mergebot-test-dcos,mesosphere-mergebot/mergebot-test-dcos,xinxian0458/dcos,lingmann/dcos,vishnu2kmohan/dcos,mnaboka/dcos,GoelDeepak/dcos,xinxian0458/dcos,mellenburg/dcos,dcos/dcos,surdy/dcos,darkonie/dcos,mesosphere-mergebot/mergebot-test-dcos,BenWhitehead/dcos,mnaboka/dcos,branden/dcos,mesosphere-mergebot/dcos,asridharan/dcos,asridharan/dcos,BenWhitehead/dcos,mnaboka/dcos,BenWhitehead/dcos,kensipe/dcos,branden/dcos,kensipe/dcos,vishnu2kmohan/dcos,branden/dcos,dcos/dcos,mellenburg/dcos,GoelDeepak/dcos,darkonie/dcos,darkonie/dcos,jeid64/dcos,asridharan/dcos,darkonie/dcos,mnaboka/dcos,asridharan/dcos,kensipe/dcos,xinxian0458/dcos,dcos/dcos,mellenburg/dcos,mellenburg/dcos,amitaekbote/dcos,surdy/dcos
import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. Add integration test for `pkgpanda add`
import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. def test_add(tmpdir): assert run([ "pkgpanda", "add", "{0}/../tests/resources/remote_repo/packages/mesos/mesos--0.22.0.tar.xz".format(os.getcwd()), "--repository={0}".format(tmpdir), ]) == "" # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(branden): Test unable to add case.
<commit_before>import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. <commit_msg>Add integration test for `pkgpanda add`<commit_after>
import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. def test_add(tmpdir): assert run([ "pkgpanda", "add", "{0}/../tests/resources/remote_repo/packages/mesos/mesos--0.22.0.tar.xz".format(os.getcwd()), "--repository={0}".format(tmpdir), ]) == "" # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(branden): Test unable to add case.
import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. Add integration test for `pkgpanda add`import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. def test_add(tmpdir): assert run([ "pkgpanda", "add", "{0}/../tests/resources/remote_repo/packages/mesos/mesos--0.22.0.tar.xz".format(os.getcwd()), "--repository={0}".format(tmpdir), ]) == "" # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(branden): Test unable to add case.
<commit_before>import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. <commit_msg>Add integration test for `pkgpanda add`<commit_after>import os from pkgpanda.util import expect_fs from util import run fetch_output = """\rFetching: mesos--0.22.0\rFetched: mesos--0.22.0\n""" def test_fetch(tmpdir): # NOTE: tmpdir is explicitly empty because we want to be sure a fetch. # succeeds when there isn't anything yet. # Start a simpleHTTPServer to serve the packages # fetch a couple packages assert run([ "pkgpanda", "fetch", "mesos--0.22.0", "--repository={0}".format(tmpdir), "--repository-url=file://{0}/../tests/resources/remote_repo/".format(os.getcwd()) ]) == fetch_output # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(cmaloney): Test multiple fetches on one line. # TODO(cmaloney): Test unable to fetch case. def test_add(tmpdir): assert run([ "pkgpanda", "add", "{0}/../tests/resources/remote_repo/packages/mesos/mesos--0.22.0.tar.xz".format(os.getcwd()), "--repository={0}".format(tmpdir), ]) == "" # Ensure that the package at least somewhat extracted correctly. expect_fs( "{0}".format(tmpdir), { "mesos--0.22.0": ["lib", "bin_master", "bin_slave", "pkginfo.json", "bin"] }) # TODO(branden): Test unable to add case.
f1e5e2cc7fd35e0446f105d619dc01d3ba837865
byceps/blueprints/admin/party/forms.py
byceps/blueprints/admin/party/forms.py
""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class UpdateForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) archived = BooleanField('archiviert') class CreateForm(UpdateForm): id = StringField('ID', validators=[Length(min=1, max=40)])
""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
Introduce base party form, limit `archived` flag to update form
Introduce base party form, limit `archived` flag to update form
Python
bsd-3-clause
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps
""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class UpdateForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) archived = BooleanField('archiviert') class CreateForm(UpdateForm): id = StringField('ID', validators=[Length(min=1, max=40)]) Introduce base party form, limit `archived` flag to update form
""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
<commit_before>""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class UpdateForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) archived = BooleanField('archiviert') class CreateForm(UpdateForm): id = StringField('ID', validators=[Length(min=1, max=40)]) <commit_msg>Introduce base party form, limit `archived` flag to update form<commit_after>
""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class UpdateForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) archived = BooleanField('archiviert') class CreateForm(UpdateForm): id = StringField('ID', validators=[Length(min=1, max=40)]) Introduce base party form, limit `archived` flag to update form""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
<commit_before>""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class UpdateForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) archived = BooleanField('archiviert') class CreateForm(UpdateForm): id = StringField('ID', validators=[Length(min=1, max=40)]) <commit_msg>Introduce base party form, limit `archived` flag to update form<commit_after>""" byceps.blueprints.admin.party.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2019 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
f9a0b8395adcf70c23c975a06e7667d673f74ac5
stoneridge_uploader.py
stoneridge_uploader.py
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run()
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) if not upload_files: # Nothing to do, so forget it! return files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run()
Fix uploader when there is nothing to upload
Fix uploader when there is nothing to upload
Python
mpl-2.0
mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run() Fix uploader when there is nothing to upload
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) if not upload_files: # Nothing to do, so forget it! return files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run()
<commit_before>#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run() <commit_msg>Fix uploader when there is nothing to upload<commit_after>
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) if not upload_files: # Nothing to do, so forget it! return files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run()
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run() Fix uploader when there is nothing to upload#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) if not upload_files: # Nothing to do, so forget it! return files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run()
<commit_before>#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run() <commit_msg>Fix uploader when there is nothing to upload<commit_after>#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public License, # v. 2.0. If a copy of the MPL was not distributed with this file, You can # obtain one at http://mozilla.org/MPL/2.0/. import glob import os import requests import stoneridge class StoneRidgeUploader(object): """Takes the upload files created by the collator and uploads them to the graph server """ def __init__(self): self.url = stoneridge.get_config('upload', 'url') def run(self): file_pattern = os.path.join(stoneridge.outdir, 'upload_*.json') upload_files = glob.glob(file_pattern) if not upload_files: # Nothing to do, so forget it! return files = {os.path.basename(fname): open(fname, 'rb') for fname in upload_files} requests.post(self.url, files=files) for f in files.values(): f.close() @stoneridge.main def main(): parser = stoneridge.ArgumentParser() args = parser.parse_args() uploader = StoneRidgeUploader() uploader.run()
32d7921de5768fd74983ebff6fa37212aed24e83
all/shellenv/_win.py
all/shellenv/_win.py
# coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale from ._types import str_cls _sys_encoding = locale.getpreferredencoding() def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) return (shell, dict(os.environ))
# coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale import sys import ctypes from ._types import str_cls _sys_encoding = locale.getpreferredencoding() kernel32 = ctypes.windll.kernel32 kernel32.GetEnvironmentStringsW.argtypes = [] kernel32.GetEnvironmentStringsW.restype = ctypes.c_void_p def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) if sys.version_info < (3,): str_pointer = kernel32.GetEnvironmentStringsW() string = ctypes.wstring_at(str_pointer) values = {} while string != '': if string[0].isalpha(): name, value = string.split(u'=', 1) values[name] = value # Include the trailing null byte, and measure each # char as 2 bytes since Windows uses UTF-16 for # wide chars str_pointer += (len(string) + 1) * 2 string = ctypes.wstring_at(str_pointer) else: values = dict(os.environ) return (shell, values)
Use kernel32 with ST2 on Windows to get unicode environmental variable values
Use kernel32 with ST2 on Windows to get unicode environmental variable values
Python
mit
codexns/shellenv
# coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale from ._types import str_cls _sys_encoding = locale.getpreferredencoding() def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) return (shell, dict(os.environ)) Use kernel32 with ST2 on Windows to get unicode environmental variable values
# coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale import sys import ctypes from ._types import str_cls _sys_encoding = locale.getpreferredencoding() kernel32 = ctypes.windll.kernel32 kernel32.GetEnvironmentStringsW.argtypes = [] kernel32.GetEnvironmentStringsW.restype = ctypes.c_void_p def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) if sys.version_info < (3,): str_pointer = kernel32.GetEnvironmentStringsW() string = ctypes.wstring_at(str_pointer) values = {} while string != '': if string[0].isalpha(): name, value = string.split(u'=', 1) values[name] = value # Include the trailing null byte, and measure each # char as 2 bytes since Windows uses UTF-16 for # wide chars str_pointer += (len(string) + 1) * 2 string = ctypes.wstring_at(str_pointer) else: values = dict(os.environ) return (shell, values)
<commit_before># coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale from ._types import str_cls _sys_encoding = locale.getpreferredencoding() def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) return (shell, dict(os.environ)) <commit_msg>Use kernel32 with ST2 on Windows to get unicode environmental variable values<commit_after>
# coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale import sys import ctypes from ._types import str_cls _sys_encoding = locale.getpreferredencoding() kernel32 = ctypes.windll.kernel32 kernel32.GetEnvironmentStringsW.argtypes = [] kernel32.GetEnvironmentStringsW.restype = ctypes.c_void_p def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) if sys.version_info < (3,): str_pointer = kernel32.GetEnvironmentStringsW() string = ctypes.wstring_at(str_pointer) values = {} while string != '': if string[0].isalpha(): name, value = string.split(u'=', 1) values[name] = value # Include the trailing null byte, and measure each # char as 2 bytes since Windows uses UTF-16 for # wide chars str_pointer += (len(string) + 1) * 2 string = ctypes.wstring_at(str_pointer) else: values = dict(os.environ) return (shell, values)
# coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale from ._types import str_cls _sys_encoding = locale.getpreferredencoding() def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) return (shell, dict(os.environ)) Use kernel32 with ST2 on Windows to get unicode environmental variable values# coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale import sys import ctypes from ._types import str_cls _sys_encoding = locale.getpreferredencoding() kernel32 = ctypes.windll.kernel32 kernel32.GetEnvironmentStringsW.argtypes = [] kernel32.GetEnvironmentStringsW.restype = ctypes.c_void_p def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) if sys.version_info < (3,): str_pointer = kernel32.GetEnvironmentStringsW() string = ctypes.wstring_at(str_pointer) values = {} while string != '': if string[0].isalpha(): name, value = string.split(u'=', 1) values[name] = value # Include the trailing null byte, and measure each # char as 2 bytes since Windows uses UTF-16 for # wide chars str_pointer += (len(string) + 1) * 2 string = ctypes.wstring_at(str_pointer) else: values = dict(os.environ) return (shell, values)
<commit_before># coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale from ._types import str_cls _sys_encoding = locale.getpreferredencoding() def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) return (shell, dict(os.environ)) <commit_msg>Use kernel32 with ST2 on Windows to get unicode environmental variable values<commit_after># coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function import os import locale import sys import ctypes from ._types import str_cls _sys_encoding = locale.getpreferredencoding() kernel32 = ctypes.windll.kernel32 kernel32.GetEnvironmentStringsW.argtypes = [] kernel32.GetEnvironmentStringsW.restype = ctypes.c_void_p def get_env(shell=None): """ Return environment variables for the current user :param shell: The shell to get the env from - unused on Windows :return: A 2-element tuple: - [0] unicode string shell path - [1] env dict with keys and values as unicode strings """ shell = os.environ['ComSpec'] if not isinstance(shell, str_cls): shell = shell.decode(_sys_encoding) if sys.version_info < (3,): str_pointer = kernel32.GetEnvironmentStringsW() string = ctypes.wstring_at(str_pointer) values = {} while string != '': if string[0].isalpha(): name, value = string.split(u'=', 1) values[name] = value # Include the trailing null byte, and measure each # char as 2 bytes since Windows uses UTF-16 for # wide chars str_pointer += (len(string) + 1) * 2 string = ctypes.wstring_at(str_pointer) else: values = dict(os.environ) return (shell, values)
59fc12548422ecaa861f810d0e40b631801e2de0
interface/backend/static/tests.py
interface/backend/static/tests.py
from django.test import TestCase from django.urls import reverse class SmokeTest(TestCase): def test_landing(self): url = reverse('static:home') resp = self.client.get(url) self.assertContains(resp, 'Concept to Clinic') self.assertEqual(resp.status_code, 200)
from django.test import TestCase from django.urls import reverse class SmokeTest(TestCase): def test_landing(self): url = reverse('static:open_image') resp = self.client.get(url) self.assertContains(resp, 'Concept to Clinic') self.assertEqual(resp.status_code, 200)
Fix failing test caused by view rename
Fix failing test caused by view rename
Python
mit
vessemer/concept-to-clinic,antonow/concept-to-clinic,antonow/concept-to-clinic,antonow/concept-to-clinic,antonow/concept-to-clinic,vessemer/concept-to-clinic,vessemer/concept-to-clinic,vessemer/concept-to-clinic
from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:home')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)

Fix failing test caused by view rename

from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:open_image')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)

<commit_before>from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:home')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)
<commit_msg>Fix failing test caused by view rename<commit_after>

from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:open_image')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)

from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:home')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)

Fix failing test caused by view rename

from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:open_image')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)

<commit_before>from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:home')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)
<commit_msg>Fix failing test caused by view rename<commit_after>from django.test import TestCase
from django.urls import reverse


class SmokeTest(TestCase):
    def test_landing(self):
        url = reverse('static:open_image')
        resp = self.client.get(url)

        self.assertContains(resp, 'Concept to Clinic')
        self.assertEqual(resp.status_code, 200)
f4aad7a704628e2ecdbc2222e71998bb71cf37ec
wake.py
wake.py
from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)

#!/usr/bin/env python
from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)
Add shebang to main script.
Add shebang to main script.
Python
bsd-3-clause
chromakode/wake
from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)

Add shebang to main script.

#!/usr/bin/env python
from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)

<commit_before>from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)
<commit_msg>Add shebang to main script.<commit_after>

#!/usr/bin/env python
from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)

from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)

Add shebang to main script.

#!/usr/bin/env python
from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)

<commit_before>from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)
<commit_msg>Add shebang to main script.<commit_after>#!/usr/bin/env python
from been.couch import CouchStore
from flask import Flask, render_template

app = Flask(__name__)
app.jinja_env.trim_blocks = True

store = CouchStore()
store.load()

@app.route('/')
def wake():
    return render_template('stream.html', events=store.events())

if __name__ == '__main__':
    app.run(debug=True)
3eaf93f2ecee68fafa1ff4f75d4c6e7f09a37043
api/streams/views.py
api/streams/views.py
from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream))
    r.raise_for_status()

    return JsonResponse(r.json())

from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream), timeout=5)
    r.raise_for_status()

    return JsonResponse(r.json())
Add timeout to Icecast status request
Add timeout to Icecast status request
Python
mit
urfonline/api,urfonline/api,urfonline/api
from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream))
    r.raise_for_status()

    return JsonResponse(r.json())

Add timeout to Icecast status request

from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream), timeout=5)
    r.raise_for_status()

    return JsonResponse(r.json())

<commit_before>from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream))
    r.raise_for_status()

    return JsonResponse(r.json())
<commit_msg>Add timeout to Icecast status request<commit_after>

from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream), timeout=5)
    r.raise_for_status()

    return JsonResponse(r.json())

from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream))
    r.raise_for_status()

    return JsonResponse(r.json())

Add timeout to Icecast status request

from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream), timeout=5)
    r.raise_for_status()

    return JsonResponse(r.json())

<commit_before>from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream))
    r.raise_for_status()

    return JsonResponse(r.json())
<commit_msg>Add timeout to Icecast status request<commit_after>from api.streams.models import StreamConfiguration
from django.http import JsonResponse
from django.http.request import HttpRequest
import requests


def get_stream_status(request: HttpRequest, stream_slug: str):
    stream = StreamConfiguration.objects.get(slug=stream_slug)
    r = requests.get('http://{stream.host}:{stream.port}/status-json.xsl'.format(stream=stream), timeout=5)
    r.raise_for_status()

    return JsonResponse(r.json())
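The record above also illustrates a general pattern worth making explicit: requests.get() blocks indefinitely by default, so any call out to an external service (here, an Icecast status endpoint) should bound the wait and handle the resulting exception. A minimal sketch under that assumption — the function name and fallback payload below are illustrative, not taken from the project:

import requests

def fetch_json(url):
    # timeout=5 bounds the wait; requests raises
    # requests.exceptions.Timeout if the host does not answer in time.
    try:
        r = requests.get(url, timeout=5)
        r.raise_for_status()
        return r.json()
    except requests.exceptions.Timeout:
        return {'error': 'upstream timed out'}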
5f05c1f6e06594328a2b328f6b995288707f593c
src/winton_kafka_streams/kafka_stream.py
src/winton_kafka_streams/kafka_stream.py
""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close()
""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if msg is None: continue elif not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close()
Handle None returned from poll()
Handle None returned from poll()
Python
apache-2.0
wintoncode/winton-kafka-streams
""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close() Handle None returned from poll()
""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if msg is None: continue elif not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close()
<commit_before>""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close() <commit_msg>Handle None returned from poll()<commit_after>
""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if msg is None: continue elif not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close()
""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close() Handle None returned from poll()""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if msg is None: continue elif not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close()
<commit_before>""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close() <commit_msg>Handle None returned from poll()<commit_after>""" Primary entrypoint for applications wishing to implement Python Kafka Streams """ import logging import confluent_kafka as kafka log = logging.getLogger(__name__) class KafkaStream(object): """ Encapsulates stream graph processing units """ def __init__(self, topology, kafka_config): self.topology = topology self.kafka_config = kafka_config self.consumer = None def start(self): """ Begin streaming the data across the topology """ self.consumer = kafka.Consumer({'bootstrap.servers': self.kafka_config.BOOTSTRAP_SERVERS, 'group.id': 'test'}) #, 'group.id': 'testgroup', #'default.topic.config': {'auto.offset.reset': 'smallest'}}) log.debug('Subscribing to topics %s', self.topology.kafka_topics) self.consumer.subscribe(self.topology.kafka_topics) log.debug('Subscribed to topics') self.run() def run(self): running = True while running: msg = self.consumer.poll() if msg is None: continue elif not msg.error(): print('Received message: %s' % msg.value().decode('utf-8')) elif msg.error().code() != kafka.KafkaError._PARTITION_EOF: print(msg.error()) running = False self.consumer.close()
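The fix in this record reflects documented confluent-kafka behaviour: Consumer.poll() returns None when no message arrives within the poll timeout, so the result must be None-checked before msg.error() is touched. A minimal consume loop using the same pattern; the explicit 1.0-second poll timeout is an assumption added for illustration (the record calls poll() with no argument):

import confluent_kafka as kafka

def consume(consumer):
    while True:
        msg = consumer.poll(1.0)   # may return None when nothing arrives in time
        if msg is None:
            continue               # nothing yet; poll again
        if msg.error():
            # End-of-partition events are benign; anything else stops the loop
            if msg.error().code() != kafka.KafkaError._PARTITION_EOF:
                break
            continue
        print(msg.value().decode('utf-8'))
    consumer.close()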
6adc30c9db58b2372c3d38f516f39faee3b87393
tests/test_settings.py
tests/test_settings.py
from __future__ import unicode_literals

from os.path import dirname

MIU_TEST_ROOT = dirname(__file__)

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []

from __future__ import unicode_literals

from os.path import dirname, abspath, join

BASE_DIR = dirname(abspath(__file__))

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []
Configure TEMPLATES in test settings.
Configure TEMPLATES in test settings.
Python
bsd-3-clause
carljm/django-markitup,carljm/django-markitup,zsiciarz/django-markitup,zsiciarz/django-markitup,carljm/django-markitup,zsiciarz/django-markitup
from __future__ import unicode_literals

from os.path import dirname

MIU_TEST_ROOT = dirname(__file__)

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []

Configure TEMPLATES in test settings.

from __future__ import unicode_literals

from os.path import dirname, abspath, join

BASE_DIR = dirname(abspath(__file__))

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []

<commit_before>from __future__ import unicode_literals

from os.path import dirname

MIU_TEST_ROOT = dirname(__file__)

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []
<commit_msg>Configure TEMPLATES in test settings.<commit_after>

from __future__ import unicode_literals

from os.path import dirname, abspath, join

BASE_DIR = dirname(abspath(__file__))

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []

from __future__ import unicode_literals

from os.path import dirname

MIU_TEST_ROOT = dirname(__file__)

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []

Configure TEMPLATES in test settings.

from __future__ import unicode_literals

from os.path import dirname, abspath, join

BASE_DIR = dirname(abspath(__file__))

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []

<commit_before>from __future__ import unicode_literals

from os.path import dirname

MIU_TEST_ROOT = dirname(__file__)

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []
<commit_msg>Configure TEMPLATES in test settings.<commit_after>from __future__ import unicode_literals

from os.path import dirname, abspath, join

BASE_DIR = dirname(abspath(__file__))

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "markitup",
    "tests",
    "tests.test_migration",
]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

STATIC_URL = "/static/"

ROOT_URLCONF = "tests.urls"

# Use str so this isn't unicode on python 2
MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"})

SECRET_KEY = 'test-secret'

MIDDLEWARE_CLASSES = []
655f46a10245de0b7f4d9727f816815c6493d230
tests/test_settings.py
tests/test_settings.py
# -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

# -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings, get_thumb


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

    def test_thumb(self):
        self.assertEqual(get_thumb(self.settings, 'example.jpg'),
                         'thumbnails/example.tn.jpg')
        self.assertEqual(get_thumb(self.settings, 'test/example.jpg'),
                         'test/thumbnails/example.tn.jpg')
Add a test for the get_thumb function.
Add a test for the get_thumb function.
Python
mit
jdn06/sigal,Ferada/sigal,elaOnMars/sigal,kontza/sigal,kontza/sigal,t-animal/sigal,kontza/sigal,franek/sigal,cbosdo/sigal,cbosdo/sigal,saimn/sigal,saimn/sigal,jasuarez/sigal,xouillet/sigal,t-animal/sigal,muggenhor/sigal,saimn/sigal,elaOnMars/sigal,Ferada/sigal,franek/sigal,muggenhor/sigal,cbosdo/sigal,jdn06/sigal,xouillet/sigal,jasuarez/sigal,Ferada/sigal,t-animal/sigal,jdn06/sigal,xouillet/sigal,jasuarez/sigal
# -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

Add a test for the get_thumb function.

# -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings, get_thumb


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

    def test_thumb(self):
        self.assertEqual(get_thumb(self.settings, 'example.jpg'),
                         'thumbnails/example.tn.jpg')
        self.assertEqual(get_thumb(self.settings, 'test/example.jpg'),
                         'test/thumbnails/example.tn.jpg')

<commit_before># -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')
<commit_msg>Add a test for the get_thumb function.<commit_after>

# -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings, get_thumb


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

    def test_thumb(self):
        self.assertEqual(get_thumb(self.settings, 'example.jpg'),
                         'thumbnails/example.tn.jpg')
        self.assertEqual(get_thumb(self.settings, 'test/example.jpg'),
                         'test/thumbnails/example.tn.jpg')

# -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

Add a test for the get_thumb function.

# -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings, get_thumb


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

    def test_thumb(self):
        self.assertEqual(get_thumb(self.settings, 'example.jpg'),
                         'thumbnails/example.tn.jpg')
        self.assertEqual(get_thumb(self.settings, 'test/example.jpg'),
                         'test/thumbnails/example.tn.jpg')

<commit_before># -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')
<commit_msg>Add a test for the get_thumb function.<commit_after># -*- coding:utf-8 -*-

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest  # NOQA

from sigal.settings import read_settings, get_thumb


class TestSettings(unittest.TestCase):
    "Read a settings file and check that the configuration is well done."

    def setUp(self):
        "Read the sample config file"
        self.path = os.path.abspath(os.path.dirname(__file__))
        default_conf = os.path.join(self.path, 'sample', 'sigal.conf.py')
        self.settings = read_settings(default_conf)

    def test_sizes(self):
        "Test that image sizes are correctly read"
        self.assertTupleEqual(self.settings['img_size'], (640, 480))
        self.assertTupleEqual(self.settings['thumb_size'], (200, 150))

    def test_settings(self):
        self.assertEqual(self.settings['thumb_suffix'], '.tn')

    def test_thumb(self):
        self.assertEqual(get_thumb(self.settings, 'example.jpg'),
                         'thumbnails/example.tn.jpg')
        self.assertEqual(get_thumb(self.settings, 'test/example.jpg'),
                         'test/thumbnails/example.tn.jpg')
733f2006f84ade6033a4be2fe3334213b8a2ce85
mepcheck/savemeps.py
mepcheck/savemeps.py
import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    url = "http://www.votewatch.eu//en/term8-european-parliament-members.html?limit=804"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()

import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    # TODO - make this configurable
    url = "http://www.votewatch.eu//en/term9-european-parliament-members.html?limit=1000"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()
Update url for new term
[HOTFIX] Update url for new term There's a new term, I have to make this configurable
Python
mit
alanmarazzi/mepcheck
import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    url = "http://www.votewatch.eu//en/term8-european-parliament-members.html?limit=804"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()

[HOTFIX] Update url for new term

There's a new term, I have to make this configurable

import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    # TODO - make this configurable
    url = "http://www.votewatch.eu//en/term9-european-parliament-members.html?limit=1000"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()

<commit_before>import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    url = "http://www.votewatch.eu//en/term8-european-parliament-members.html?limit=804"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()
<commit_msg>[HOTFIX] Update url for new term

There's a new term, I have to make this configurable<commit_after>

import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    # TODO - make this configurable
    url = "http://www.votewatch.eu//en/term9-european-parliament-members.html?limit=1000"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()

import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    url = "http://www.votewatch.eu//en/term8-european-parliament-members.html?limit=804"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()

[HOTFIX] Update url for new term

There's a new term, I have to make this configurable

import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    # TODO - make this configurable
    url = "http://www.votewatch.eu//en/term9-european-parliament-members.html?limit=1000"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()

<commit_before>import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    url = "http://www.votewatch.eu//en/term8-european-parliament-members.html?limit=804"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()
<commit_msg>[HOTFIX] Update url for new term

There's a new term, I have to make this configurable<commit_after>import requests
from bs4 import BeautifulSoup
import pickle
import os


def save_meps():
    # TODO - make this configurable
    url = "http://www.votewatch.eu//en/term9-european-parliament-members.html?limit=1000"
    path = os.path.expanduser("~")
    r = requests.get(url)
    r.encoding = 'utf-8'
    soup = BeautifulSoup(r.text, "html.parser")
    meps = soup.find_all('tr')
    countries = soup.find_all('td', class_='margin_image middle_align')
    idx = []
    i = 0
    for mep, country in zip(meps[1:], countries):
        i += 1
        ids = [i, mep.find('div').text.strip().lower(), country.text.strip().lower()]
        idx.append(ids)
    meps_path = os.path.join(path, ".meps")
    with open(meps_path, 'wb') as f:
        pickle.dump(idx, f, -1)
    print("File `.meps` generated in home directory")

if __name__ == '__main__':
    save_meps()
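The TODO in the patch above names the real fix: lifting the hard-coded term number and page limit into parameters. A sketch of one way that could look — the helper name and default values are the editor's, chosen to mirror the record, not taken from the project:

def votewatch_url(term=9, limit=1000):
    # Build the members-list URL for a given parliamentary term.
    return ("http://www.votewatch.eu//en/term{0}-european-parliament-members"
            ".html?limit={1}".format(term, limit))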
bba325111b47c9ba7dfc0bc9556a655e3f5afcee
tools/jtag/discover.py
tools/jtag/discover.py
#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
sys.path.append('../..')

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())

#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())
Make it work when executed from a different directory
Make it work when executed from a different directory
Python
mit
E3V3A/playtag,proteus-cpi/playtag
#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
sys.path.append('../..')

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())

Make it work when executed from a different directory

#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())

<commit_before>#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
sys.path.append('../..')

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())
<commit_msg>Make it work when executed from a different directory<commit_after>

#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())

#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
sys.path.append('../..')

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())

Make it work when executed from a different directory

#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())

<commit_before>#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
sys.path.append('../..')

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())
<commit_msg>Make it work when executed from a different directory<commit_after>#! /usr/bin/env python
'''
Simplistic chain discovery
'''

import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))

from playtag.cables.digilent import Jtagger
from playtag.jtag.discover import Chain

print Chain(Jtagger())
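The change in this record swaps a working-directory-relative sys.path entry for one anchored at the script's own location, which is the standard way to make such a script runnable from any directory. The same idea in a small, self-contained form (the variable name is illustrative):

import os
import sys

# Resolve the repository root relative to this file rather than the
# current working directory, so imports work no matter where the
# script is launched from.
REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
sys.path.insert(0, REPO_ROOT)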
78351e785f413f44e07faa0b8a856639296a5591
examples/drawing/ego_graph.py
examples/drawing/ego_graph.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Example using the NetworkX ego_graph() function to return the main egonet of
the largest hub in a Barabási-Albert network.
"""
__author__="""Drew Conway (drew.conway@nyu.edu)"""

from operator import itemgetter

import networkx as nx
import matplotlib.pyplot as plt

if __name__ == '__main__':
    # Create a BA model graph
    n=1000
    m=2
    G=nx.generators.barabasi_albert_graph(n,m)
    # find node with largest degree
    node_and_degree=G.degree(with_labels=True)
    (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1]
    # Create ego graph of main hub
    hub_ego=nx.ego_graph(G,largest_hub)
    # Draw graph
    pos=nx.spring_layout(hub_ego)
    nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False)
    # Draw ego as large and red
    nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r')
    plt.savefig('main_ego.png')
    plt.show()

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Example using the NetworkX ego_graph() function to return the main egonet of
the largest hub in a Barabási-Albert network.
"""
__author__="""Drew Conway (drew.conway@nyu.edu)"""

from operator import itemgetter

import networkx as nx
import matplotlib.pyplot as plt

if __name__ == '__main__':
    # Create a BA model graph
    n=1000
    m=2
    G=nx.generators.barabasi_albert_graph(n,m)
    # find node with largest degree
    node_and_degree=G.degree(with_labels=True)
    (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1]
    # Create ego graph of main hub
    hub_ego=nx.ego_graph(G,largest_hub)
    # Draw graph
    pos=nx.spring_layout(hub_ego)
    nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False)
    # Draw ego as large and red
    nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r')
    plt.savefig('ego_graph.png')
    plt.show()
Change name of output file in example
Change name of output file in example --HG-- extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401549
Python
bsd-3-clause
RMKD/networkx,RMKD/networkx,jni/networkx,kernc/networkx,sharifulgeo/networkx,debsankha/networkx,kai5263499/networkx,farhaanbukhsh/networkx,jfinkels/networkx,NvanAdrichem/networkx,jakevdp/networkx,dmoliveira/networkx,jni/networkx,jakevdp/networkx,SanketDG/networkx,kernc/networkx,beni55/networkx,sharifulgeo/networkx,dhimmel/networkx,andnovar/networkx,bzero/networkx,aureooms/networkx,dhimmel/networkx,jtorrents/networkx,JamesClough/networkx,farhaanbukhsh/networkx,tmilicic/networkx,debsankha/networkx,bzero/networkx,ionanrozenfeld/networkx,aureooms/networkx,ghdk/networkx,OrkoHunter/networkx,dhimmel/networkx,chrisnatali/networkx,jtorrents/networkx,RMKD/networkx,bzero/networkx,farhaanbukhsh/networkx,nathania/networkx,jni/networkx,debsankha/networkx,chrisnatali/networkx,kernc/networkx,aureooms/networkx,blublud/networkx,goulu/networkx,blublud/networkx,chrisnatali/networkx,ionanrozenfeld/networkx,harlowja/networkx,wasade/networkx,cmtm/networkx,jcurbelo/networkx,harlowja/networkx,nathania/networkx,harlowja/networkx,kai5263499/networkx,blublud/networkx,ghdk/networkx,yashu-seth/networkx,Sixshaman/networkx,dmoliveira/networkx,ghdk/networkx,dmoliveira/networkx,ltiao/networkx,jakevdp/networkx,sharifulgeo/networkx,michaelpacer/networkx,nathania/networkx,ionanrozenfeld/networkx,kai5263499/networkx
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('main_ego.png') plt.show() Change name of output file in example --HG-- extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401549
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('ego_graph.png') plt.show()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('main_ego.png') plt.show() <commit_msg>Change name of output file in example --HG-- extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401549<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('ego_graph.png') plt.show()
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('main_ego.png') plt.show() Change name of output file in example#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('ego_graph.png') plt.show()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('main_ego.png') plt.show() <commit_msg>Change name of output file in example<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree(with_labels=True) (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('ego_graph.png') plt.show()
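The commit above only renames the output file; the G.degree(with_labels=True) call it preserves is an old NetworkX API that modern releases no longer accept. A minimal sketch of the same hub lookup against the current NetworkX (2.x/3.x) API, offered as an assumption for readers running the example today rather than as part of the original commit:

from operator import itemgetter
import networkx as nx

G = nx.barabasi_albert_graph(1000, 2)  # same BA model as the example
# G.degree is now an iterable DegreeView of (node, degree) pairs
largest_hub, degree = max(G.degree, key=itemgetter(1))
hub_ego = nx.ego_graph(G, largest_hub)  # egonet of the main hub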
6ecbcfd1b132b0c4acd2d413818348a2fe2b6bfe
Cauldron/utils/referencecompat.py
Cauldron/utils/referencecompat.py
# -*- coding: utf-8 -*- try: from __builtins__ import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError']
# -*- coding: utf-8 -*- import sys try: if sys.version_info[0] < 3: from __builtins__ import ReferenceError else: from builtins import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError']
Fix a py3 bug in reference compatibility
Fix a py3 bug in reference compatibility
Python
bsd-3-clause
alexrudy/Cauldron
# -*- coding: utf-8 -*- try: from __builtins__ import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError'] Fix a py3 bug in reference compatibility
# -*- coding: utf-8 -*- import sys try: if sys.version_info[0] < 3: from __builtins__ import ReferenceError else: from builtins import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError']
<commit_before># -*- coding: utf-8 -*- try: from __builtins__ import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError'] <commit_msg>Fix a py3 bug in reference compatibility<commit_after>
# -*- coding: utf-8 -*- import sys try: if sys.version_info[0] < 3: from __builtins__ import ReferenceError else: from builtins import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError']
# -*- coding: utf-8 -*- try: from __builtins__ import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError'] Fix a py3 bug in reference compatibility# -*- coding: utf-8 -*- import sys try: if sys.version_info[0] < 3: from __builtins__ import ReferenceError else: from builtins import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError']
<commit_before># -*- coding: utf-8 -*- try: from __builtins__ import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError'] <commit_msg>Fix a py3 bug in reference compatibility<commit_after># -*- coding: utf-8 -*- import sys try: if sys.version_info[0] < 3: from __builtins__ import ReferenceError else: from builtins import ReferenceError except (NameError, ImportError): # pragma: no cover from weakref import ReferenceError __all__ = ['ReferenceError']
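The conditional import above only does real work on Python 2: since Python 2.5, ReferenceError has been a builtin and is the same object as weakref.ReferenceError, so on Python 3 the shim could collapse to the builtin name. A quick check of that claim on CPython 3, using only the standard library:

import weakref

# The builtin and the weakref re-export are one object on CPython 3,
# so no compatibility import is strictly needed there.
assert ReferenceError is weakref.ReferenceError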
492004049da87744cd96a6e6afeb9a6239a8ac44
ocradmin/lib/nodetree/registry.py
ocradmin/lib/nodetree/registry.py
""" Registry class and global node registry. """ class NotRegistered(KeyError): pass __all__ = ["NodeRegistry", "nodes"] class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node in the node registry. The node will be automatically instantiated if not already an instance. """ self[node.name] = inspect.isclass(node) and node() or node def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def filter_types(self, type): """Return all nodes of a specific type.""" return dict((name, node) for name, node in self.iteritems() if node.type == type) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry()
""" Registry class and global node registry. """ import inspect class NotRegistered(KeyError): pass class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node class in the node registry.""" self[node.name] = inspect.isclass(node) and node or node.__class__ def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def get_by_attr(self, attr, value=None): """Return all nodes of a specific type that have a matching attr. If `value` is given, only return nodes where the attr value matches.""" ret = {} for name, node in self.iteritems(): if hasattr(node, attr) and value is None\ or hasattr(node, name) and getattr(node, name) == value: ret[name] = node return ret def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry()
Fix missing import. Add method to get all nodes with a particular attribute
Fix missing import. Add method to get all nodes with a particular attribute
Python
apache-2.0
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
""" Registry class and global node registry. """ class NotRegistered(KeyError): pass __all__ = ["NodeRegistry", "nodes"] class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node in the node registry. The node will be automatically instantiated if not already an instance. """ self[node.name] = inspect.isclass(node) and node() or node def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def filter_types(self, type): """Return all nodes of a specific type.""" return dict((name, node) for name, node in self.iteritems() if node.type == type) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry() Fix missing import. Add method to get all nodes with a particular attribute
""" Registry class and global node registry. """ import inspect class NotRegistered(KeyError): pass class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node class in the node registry.""" self[node.name] = inspect.isclass(node) and node or node.__class__ def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def get_by_attr(self, attr, value=None): """Return all nodes of a specific type that have a matching attr. If `value` is given, only return nodes where the attr value matches.""" ret = {} for name, node in self.iteritems(): if hasattr(node, attr) and value is None\ or hasattr(node, name) and getattr(node, name) == value: ret[name] = node return ret def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry()
<commit_before>""" Registry class and global node registry. """ class NotRegistered(KeyError): pass __all__ = ["NodeRegistry", "nodes"] class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node in the node registry. The node will be automatically instantiated if not already an instance. """ self[node.name] = inspect.isclass(node) and node() or node def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def filter_types(self, type): """Return all nodes of a specific type.""" return dict((name, node) for name, node in self.iteritems() if node.type == type) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry() <commit_msg>Fix missing import. Add method to get all nodes with a particular attribute<commit_after>
""" Registry class and global node registry. """ import inspect class NotRegistered(KeyError): pass class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node class in the node registry.""" self[node.name] = inspect.isclass(node) and node or node.__class__ def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def get_by_attr(self, attr, value=None): """Return all nodes of a specific type that have a matching attr. If `value` is given, only return nodes where the attr value matches.""" ret = {} for name, node in self.iteritems(): if hasattr(node, attr) and value is None\ or hasattr(node, name) and getattr(node, name) == value: ret[name] = node return ret def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry()
""" Registry class and global node registry. """ class NotRegistered(KeyError): pass __all__ = ["NodeRegistry", "nodes"] class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node in the node registry. The node will be automatically instantiated if not already an instance. """ self[node.name] = inspect.isclass(node) and node() or node def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def filter_types(self, type): """Return all nodes of a specific type.""" return dict((name, node) for name, node in self.iteritems() if node.type == type) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry() Fix missing import. Add method to get all nodes with a particular attribute""" Registry class and global node registry. """ import inspect class NotRegistered(KeyError): pass class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node class in the node registry.""" self[node.name] = inspect.isclass(node) and node or node.__class__ def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def get_by_attr(self, attr, value=None): """Return all nodes of a specific type that have a matching attr. If `value` is given, only return nodes where the attr value matches.""" ret = {} for name, node in self.iteritems(): if hasattr(node, attr) and value is None\ or hasattr(node, name) and getattr(node, name) == value: ret[name] = node return ret def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry()
<commit_before>""" Registry class and global node registry. """ class NotRegistered(KeyError): pass __all__ = ["NodeRegistry", "nodes"] class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node in the node registry. The node will be automatically instantiated if not already an instance. """ self[node.name] = inspect.isclass(node) and node() or node def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def filter_types(self, type): """Return all nodes of a specific type.""" return dict((name, node) for name, node in self.iteritems() if node.type == type) def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry() <commit_msg>Fix missing import. Add method to get all nodes with a particular attribute<commit_after>""" Registry class and global node registry. """ import inspect class NotRegistered(KeyError): pass class NodeRegistry(dict): NotRegistered = NotRegistered def register(self, node): """Register a node class in the node registry.""" self[node.name] = inspect.isclass(node) and node or node.__class__ def unregister(self, name): """Unregister node by name.""" try: # Might be a node class name = name.name except AttributeError: pass self.pop(name) def get_by_attr(self, attr, value=None): """Return all nodes of a specific type that have a matching attr. If `value` is given, only return nodes where the attr value matches.""" ret = {} for name, node in self.iteritems(): if hasattr(node, attr) and value is None\ or hasattr(node, name) and getattr(node, name) == value: ret[name] = node return ret def __getitem__(self, key): try: return dict.__getitem__(self, key) except KeyError: raise self.NotRegistered(key) def pop(self, key, *args): try: return dict.pop(self, key, *args) except KeyError: raise self.NotRegistered(key) nodes = NodeRegistry()
0225177c39df95bc12d9d9b53433f310d083905f
tests/test_heroku.py
tests/test_heroku.py
"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200
"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 def test_nonexistent_route(self): """Ensure that a nonexistent route returns a 500 error.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/nope".format(app_id)) assert r.status_code == 500
Add test for correct error for nonexistent routes
Add test for correct error for nonexistent routes
Python
mit
berkeley-cocosci/Wallace,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,suchow/Wallace,Dallinger/Dallinger,berkeley-cocosci/Wallace,berkeley-cocosci/Wallace,jcpeterson/Dallinger,jcpeterson/Dallinger,suchow/Wallace,suchow/Wallace,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger
"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 Add test for correct error for nonexistent routes
"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 def test_nonexistent_route(self): """Ensure that a nonexistent route returns a 500 error.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/nope".format(app_id)) assert r.status_code == 500
<commit_before>"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 <commit_msg>Add test for correct error for nonexistent routes<commit_after>
"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 def test_nonexistent_route(self): """Ensure that a nonexistent route returns a 500 error.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/nope".format(app_id)) assert r.status_code == 500
"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 Add test for correct error for nonexistent routes"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 def test_nonexistent_route(self): """Ensure that a nonexistent route returns a 500 error.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/nope".format(app_id)) assert r.status_code == 500
<commit_before>"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 <commit_msg>Add test for correct error for nonexistent routes<commit_after>"""Tests for the Wallace API.""" import subprocess import re import os import requests class TestHeroku(object): """The Heroku test class.""" sandbox_output = subprocess.check_output( "cd examples/bartlett1932; wallace sandbox --verbose", shell=True) os.environ['app_id'] = re.search( 'Running as experiment (.*)...', sandbox_output).group(1) @classmethod def teardown_class(cls): """Remove the app from Heroku.""" app_id = os.environ['app_id'] subprocess.call( "heroku apps:destroy --app {} --confirm {}".format(app_id, app_id), shell=True) def test_summary(self): """Launch the experiment on Heroku.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/summary".format(app_id)) assert r.json()['status'] == [] def test_robots(self): """Ensure that robots.txt can be accessed.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/robots.txt".format(app_id)) assert r.status_code == 200 def test_nonexistent_route(self): """Ensure that a nonexistent route returns a 500 error.""" app_id = os.environ['app_id'] r = requests.get("http://{}.herokuapp.com/nope".format(app_id)) assert r.status_code == 500
f97d9d6462ce9e60c1811bd40428a1f30835ce95
hb_res/storage/FileExplanationStorage.py
hb_res/storage/FileExplanationStorage.py
from hb_res.storage import ExplanationStorage __author__ = 'skird' import codecs from hb_res.explanations.Explanation import Explanation class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.descriptor = codecs.open(self.file_name, mode='a', encoding='utf-8') def entries(self): self.descriptor.flush() for line in open(self.file_name): yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: print(entry, file=self.descriptor) def clear(self) -> None: self.descriptor = codecs.open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: self.descriptor.flush() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation
from hb_res.storage import ExplanationStorage from hb_res.explanations.Explanation import Explanation __author__ = 'skird' class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.write_desc = None def entries(self): if self.write_desc is not None: self.write_desc.flush() with open(self.file_name, encoding='utf-8') as read_desc: for line in read_desc: yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: if self.write_desc is None: self.write_desc = open(self.file_name, mode='a', encoding='utf-8') print(entry, file=self.write_desc) def clear(self) -> None: if self.write_desc is not None: self.write_desc.close() self.write_desc = open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: if self.write_desc is not None: self.write_desc.close() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation
Fix permission-denied error in read-only context
Fix permission-denied error in read-only context
Python
mit
hatbot-team/hatbot_resources
from hb_res.storage import ExplanationStorage __author__ = 'skird' import codecs from hb_res.explanations.Explanation import Explanation class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.descriptor = codecs.open(self.file_name, mode='a', encoding='utf-8') def entries(self): self.descriptor.flush() for line in open(self.file_name): yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: print(entry, file=self.descriptor) def clear(self) -> None: self.descriptor = codecs.open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: self.descriptor.flush() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation Fix permission-denied error in read-only context
from hb_res.storage import ExplanationStorage from hb_res.explanations.Explanation import Explanation __author__ = 'skird' class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.write_desc = None def entries(self): if self.write_desc is not None: self.write_desc.flush() with open(self.file_name, encoding='utf-8') as read_desc: for line in read_desc: yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: if self.write_desc is None: self.write_desc = open(self.file_name, mode='a', encoding='utf-8') print(entry, file=self.write_desc) def clear(self) -> None: if self.write_desc is not None: self.write_desc.close() self.write_desc = open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: if self.write_desc is not None: self.write_desc.close() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation
<commit_before>from hb_res.storage import ExplanationStorage __author__ = 'skird' import codecs from hb_res.explanations.Explanation import Explanation class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.descriptor = codecs.open(self.file_name, mode='a', encoding='utf-8') def entries(self): self.descriptor.flush() for line in open(self.file_name): yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: print(entry, file=self.descriptor) def clear(self) -> None: self.descriptor = codecs.open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: self.descriptor.flush() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation <commit_msg>Fix permission-denied error in read-only context<commit_after>
from hb_res.storage import ExplanationStorage from hb_res.explanations.Explanation import Explanation __author__ = 'skird' class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.write_desc = None def entries(self): if self.write_desc is not None: self.write_desc.flush() with open(self.file_name, encoding='utf-8') as read_desc: for line in read_desc: yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: if self.write_desc is None: self.write_desc = open(self.file_name, mode='a', encoding='utf-8') print(entry, file=self.write_desc) def clear(self) -> None: if self.write_desc is not None: self.write_desc.close() self.write_desc = open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: if self.write_desc is not None: self.write_desc.close() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation
from hb_res.storage import ExplanationStorage __author__ = 'skird' import codecs from hb_res.explanations.Explanation import Explanation class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.descriptor = codecs.open(self.file_name, mode='a', encoding='utf-8') def entries(self): self.descriptor.flush() for line in open(self.file_name): yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: print(entry, file=self.descriptor) def clear(self) -> None: self.descriptor = codecs.open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: self.descriptor.flush() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation Fix permission-denied error in read-only contextfrom hb_res.storage import ExplanationStorage from hb_res.explanations.Explanation import Explanation __author__ = 'skird' class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.write_desc = None def entries(self): if self.write_desc is not None: self.write_desc.flush() with open(self.file_name, encoding='utf-8') as read_desc: for line in read_desc: yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: if self.write_desc is None: self.write_desc = open(self.file_name, mode='a', encoding='utf-8') print(entry, file=self.write_desc) def clear(self) -> None: if self.write_desc is not None: self.write_desc.close() self.write_desc = open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: if self.write_desc is not None: self.write_desc.close() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation
<commit_before>from hb_res.storage import ExplanationStorage __author__ = 'skird' import codecs from hb_res.explanations.Explanation import Explanation class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.descriptor = codecs.open(self.file_name, mode='a', encoding='utf-8') def entries(self): self.descriptor.flush() for line in open(self.file_name): yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: print(entry, file=self.descriptor) def clear(self) -> None: self.descriptor = codecs.open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: self.descriptor.flush() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation <commit_msg>Fix permission-denied error in read-only context<commit_after>from hb_res.storage import ExplanationStorage from hb_res.explanations.Explanation import Explanation __author__ = 'skird' class FileExplanationStorage(ExplanationStorage): """ Class representing explanation resource connected with some text file """ def __init__(self, path_to_file): self.file_name = path_to_file self.write_desc = None def entries(self): if self.write_desc is not None: self.write_desc.flush() with open(self.file_name, encoding='utf-8') as read_desc: for line in read_desc: yield Explanation.decode(line.strip()) def add_entry(self, entry: Explanation) -> None: if self.write_desc is None: self.write_desc = open(self.file_name, mode='a', encoding='utf-8') print(entry, file=self.write_desc) def clear(self) -> None: if self.write_desc is not None: self.write_desc.close() self.write_desc = open(self.file_name, mode='w', encoding='utf-8') def close(self) -> None: if self.write_desc is not None: self.write_desc.close() def __getitem__(self, key): for explanation in self.entries(): if explanation.key == key: return explanation
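The point of the rewrite above is that the write descriptor is now opened lazily, so a storage object that is only ever read never requests write access to the file, which is the permission fix the subject line describes. A small usage sketch under that model (FileExplanationStorage as defined in the record; the path is hypothetical):

storage = FileExplanationStorage("/tmp/explanations.txt")  # hypothetical path
for explanation in storage.entries():  # read-only: no write handle is opened
    print(explanation)
storage.close()  # safe even though nothing was written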
c471a5d6421e32e9c6e3ca2db0f07eae45a85408
fuckit_commit.py
fuckit_commit.py
''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests def set_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(): ''' Connect to Twilio Client ''' pass def send_sms(): ''' Send SMS reminder ''' pass def main(): pass if __name__ == "__main__": main()
''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests from twilio.rest import TwilioRestClient def get_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(config): ''' Connect to Twilio Client ''' return TwilioRestClient(config.account_sid, config.auth_token) def send_sms(client): ''' Send SMS reminder ''' pass def main(): config = get_configuration() client = get_configuration(config) send_sms(client) if __name__ == "__main__": main()
Add code to send sms
Add code to send sms
Python
mit
ueg1990/fuckit_commit
''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests def set_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(): ''' Connect to Twilio Client ''' pass def send_sms(): ''' Send SMS reminder ''' pass def main(): pass if __name__ == "__main__": main() Add code to send sms
''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests from twilio.rest import TwilioRestClient def get_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(config): ''' Connect to Twilio Client ''' return TwilioRestClient(config.account_sid, config.auth_token) def send_sms(client): ''' Send SMS reminder ''' pass def main(): config = get_configuration() client = get_configuration(config) send_sms(client) if __name__ == "__main__": main()
<commit_before>''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests def set_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(): ''' Connect to Twilio Client ''' pass def send_sms(): ''' Send SMS reminder ''' pass def main(): pass if __name__ == "__main__": main() <commit_msg>Add code to send sms<commit_after>
''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests from twilio.rest import TwilioRestClient def get_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(config): ''' Connect to Twilio Client ''' return TwilioRestClient(config.account_sid, config.auth_token) def send_sms(client): ''' Send SMS reminder ''' pass def main(): config = get_configuration() client = get_configuration(config) send_sms(client) if __name__ == "__main__": main()
''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests def set_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(): ''' Connect to Twilio Client ''' pass def send_sms(): ''' Send SMS reminder ''' pass def main(): pass if __name__ == "__main__": main() Add code to send sms''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests from twilio.rest import TwilioRestClient def get_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(config): ''' Connect to Twilio Client ''' return TwilioRestClient(config.account_sid, config.auth_token) def send_sms(client): ''' Send SMS reminder ''' pass def main(): config = get_configuration() client = get_configuration(config) send_sms(client) if __name__ == "__main__": main()
<commit_before>''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests def set_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(): ''' Connect to Twilio Client ''' pass def send_sms(): ''' Send SMS reminder ''' pass def main(): pass if __name__ == "__main__": main() <commit_msg>Add code to send sms<commit_after>''' This module will send SMS reminders periodically, using Twilio. The aim is to encourage user to code, commit and push to GitHub everyday ''' import requests from twilio.rest import TwilioRestClient def get_configuration(): ''' Set Twilio configuration ''' pass def get_twilio_client(config): ''' Connect to Twilio Client ''' return TwilioRestClient(config.account_sid, config.auth_token) def send_sms(client): ''' Send SMS reminder ''' pass def main(): config = get_configuration() client = get_configuration(config) send_sms(client) if __name__ == "__main__": main()
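The committed main() above calls get_configuration twice; given the function set in the module, the second call was presumably meant to be get_twilio_client, otherwise send_sms never receives a REST client. A sketch of the likely intent (an inference, not a verified upstream fix):

def main():
    config = get_configuration()
    client = get_twilio_client(config)  # the commit has get_configuration(config) here
    send_sms(client)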
048994463cda7df1bbaf502bef2bf84036e73403
i18n/loaders/python_loader.py
i18n/loaders/python_loader.py
import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError as e: raise I18nFileLoadError("error loading file {0}: {1}".format(filename, e.msg)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data)
import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError: raise I18nFileLoadError("error loading file {0}".format(filename)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data)
Fix bug in python loader.
Fix bug in python loader.
Python
mit
tuvistavie/python-i18n
import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError as e: raise I18nFileLoadError("error loading file {0}: {1}".format(filename, e.msg)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data) Fix bug in python loader.
import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError: raise I18nFileLoadError("error loading file {0}".format(filename)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data)
<commit_before>import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError as e: raise I18nFileLoadError("error loading file {0}: {1}".format(filename, e.msg)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data) <commit_msg>Fix bug in python loader.<commit_after>
import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError: raise I18nFileLoadError("error loading file {0}".format(filename)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data)
import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError as e: raise I18nFileLoadError("error loading file {0}: {1}".format(filename, e.msg)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data) Fix bug in python loader.import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError: raise I18nFileLoadError("error loading file {0}".format(filename)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data)
<commit_before>import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError as e: raise I18nFileLoadError("error loading file {0}: {1}".format(filename, e.msg)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data) <commit_msg>Fix bug in python loader.<commit_after>import os.path import sys from .loader import Loader, I18nFileLoadError class PythonLoader(Loader): """class to load python files""" def __init__(self): super(PythonLoader, self).__init__() def load_file(self, filename): path, name = os.path.split(filename) module_name, ext = os.path.splitext(name) if path not in sys.path: sys.path.append(path) try: return __import__(module_name) except ImportError: raise I18nFileLoadError("error loading file {0}".format(filename)) def parse_file(self, file_content): return file_content def check_data(self, data, root_data): return hasattr(data, root_data) def get_data(self, data, root_data): return getattr(data, root_data)
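The fix above drops the Python-3-only e.msg attribute (Python 2's ImportError has no msg), but the detail need not be lost: str(e) is portable across both interpreters. A sketch of that variant, reusing I18nFileLoadError from the record:

def load_module(module_name, filename):
    """Import a translations module, keeping the original error detail."""
    try:
        return __import__(module_name)
    except ImportError as e:
        # str(e) works on Python 2 and 3; e.msg exists only on 3
        raise I18nFileLoadError("error loading file {0}: {1}".format(filename, e))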
7bc247550f136c5f0e34f411b868f9e5949e1ec4
api/tests/destinations/endpoint_tests.py
api/tests/destinations/endpoint_tests.py
import unittest from peewee import SqliteDatabase from playhouse.test_utils import test_database import api.tests.helpers as helpers from api.destinations.endpoint import * from api.destinations.endpoint import _get_destinations test_db = SqliteDatabase(':memory:') class DestinationsTests(unittest.TestCase): def setUp(self): self._all_stations = helpers.create_station_test_data() for station in self._all_stations: station.save(force_insert=True) def tearDown(self): Station.delete() def run(self, result=None): # All queries will be run in `test_db` with test_database(test_db, [Station]): super(DestinationsTests, self).run(result) def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid)
from peewee import SqliteDatabase from api.destinations.endpoint import _get_destinations from api.tests.dbtestcase import DBTestCase test_db = SqliteDatabase(':memory:') class DestinationsTests(DBTestCase): def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid)
Update destination endpoint tests to work with new version of peewee
Update destination endpoint tests to work with new version of peewee
Python
mit
mdowds/commutercalculator,mdowds/commutercalculator,mdowds/commutercalculator
import unittest from peewee import SqliteDatabase from playhouse.test_utils import test_database import api.tests.helpers as helpers from api.destinations.endpoint import * from api.destinations.endpoint import _get_destinations test_db = SqliteDatabase(':memory:') class DestinationsTests(unittest.TestCase): def setUp(self): self._all_stations = helpers.create_station_test_data() for station in self._all_stations: station.save(force_insert=True) def tearDown(self): Station.delete() def run(self, result=None): # All queries will be run in `test_db` with test_database(test_db, [Station]): super(DestinationsTests, self).run(result) def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid) Update destination endpoint tests to work with new version of peewee
from peewee import SqliteDatabase from api.destinations.endpoint import _get_destinations from api.tests.dbtestcase import DBTestCase test_db = SqliteDatabase(':memory:') class DestinationsTests(DBTestCase): def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid)
<commit_before>import unittest from peewee import SqliteDatabase from playhouse.test_utils import test_database import api.tests.helpers as helpers from api.destinations.endpoint import * from api.destinations.endpoint import _get_destinations test_db = SqliteDatabase(':memory:') class DestinationsTests(unittest.TestCase): def setUp(self): self._all_stations = helpers.create_station_test_data() for station in self._all_stations: station.save(force_insert=True) def tearDown(self): Station.delete() def run(self, result=None): # All queries will be run in `test_db` with test_database(test_db, [Station]): super(DestinationsTests, self).run(result) def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid) <commit_msg>Update destination endpoint tests to work with new version of peewee<commit_after>
from peewee import SqliteDatabase from api.destinations.endpoint import _get_destinations from api.tests.dbtestcase import DBTestCase test_db = SqliteDatabase(':memory:') class DestinationsTests(DBTestCase): def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid)
import unittest from peewee import SqliteDatabase from playhouse.test_utils import test_database import api.tests.helpers as helpers from api.destinations.endpoint import * from api.destinations.endpoint import _get_destinations test_db = SqliteDatabase(':memory:') class DestinationsTests(unittest.TestCase): def setUp(self): self._all_stations = helpers.create_station_test_data() for station in self._all_stations: station.save(force_insert=True) def tearDown(self): Station.delete() def run(self, result=None): # All queries will be run in `test_db` with test_database(test_db, [Station]): super(DestinationsTests, self).run(result) def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid) Update destination endpoint tests to work with new version of peeweefrom peewee import SqliteDatabase from api.destinations.endpoint import _get_destinations from api.tests.dbtestcase import DBTestCase test_db = SqliteDatabase(':memory:') class DestinationsTests(DBTestCase): def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid)
<commit_before>import unittest from peewee import SqliteDatabase from playhouse.test_utils import test_database import api.tests.helpers as helpers from api.destinations.endpoint import * from api.destinations.endpoint import _get_destinations test_db = SqliteDatabase(':memory:') class DestinationsTests(unittest.TestCase): def setUp(self): self._all_stations = helpers.create_station_test_data() for station in self._all_stations: station.save(force_insert=True) def tearDown(self): Station.delete() def run(self, result=None): # All queries will be run in `test_db` with test_database(test_db, [Station]): super(DestinationsTests, self).run(result) def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid) <commit_msg>Update destination endpoint tests to work with new version of peewee<commit_after>from peewee import SqliteDatabase from api.destinations.endpoint import _get_destinations from api.tests.dbtestcase import DBTestCase test_db = SqliteDatabase(':memory:') class DestinationsTests(DBTestCase): def test_get_destinations_filters_zone(self): self.assertEqual(2, len(_get_destinations())) def test_get_destinations_filters_orders(self): self.assertEqual("BAR", _get_destinations()[0].sid)
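DBTestCase is imported but not shown in the record; with peewee 3 the playhouse test_database helper the old test relied on is gone, and bind_ctx is the usual replacement. A sketch of what such a base class might look like; the Station import path and the helpers module are assumptions carried over from the old test, not verified:

import unittest

from peewee import SqliteDatabase

import api.tests.helpers as helpers
from api.models import Station  # assumed location of the model

test_db = SqliteDatabase(":memory:")


class DBTestCase(unittest.TestCase):
    def run(self, result=None):
        # bind_ctx replaces the removed playhouse.test_utils.test_database helper
        with test_db.bind_ctx([Station]):
            test_db.create_tables([Station])
            for station in helpers.create_station_test_data():
                station.save(force_insert=True)
            super(DBTestCase, self).run(result)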
0e14aaba251e3257bc298561a9004d88e0d8e3b6
turbasen/__init__.py
turbasen/__init__.py
# encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import \
    Gruppe, \
    Omrade, \
    Sted

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
# encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import (
    Gruppe,
    Omrade,
    Sted,
)

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
Apply parentheses for group import
Apply parentheses for group import
Python
mit
Turbasen/turbasen.py
# encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import \
    Gruppe, \
    Omrade, \
    Sted

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
Apply parentheses for group import
# encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import (
    Gruppe,
    Omrade,
    Sted,
)

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
<commit_before># encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import \
    Gruppe, \
    Omrade, \
    Sted

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
<commit_msg>Apply parentheses for group import<commit_after>
# encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import (
    Gruppe,
    Omrade,
    Sted,
)

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
# encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import \
    Gruppe, \
    Omrade, \
    Sted

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
Apply parentheses for group import# encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import (
    Gruppe,
    Omrade,
    Sted,
)

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
<commit_before># encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import \
    Gruppe, \
    Omrade, \
    Sted

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
<commit_msg>Apply parentheses for group import<commit_after># encoding: utf-8

from __future__ import absolute_import, division, print_function, unicode_literals

# Import the models we want directly available through the root module
from .models import (
    Gruppe,
    Omrade,
    Sted,
)

# Make configure directly available through the root module
from .settings import configure

# Make handle_event directly available through the root module
from .events import handle_event
4f9569037ad835b852e6389b082155d45a88774c
kokki/cookbooks/nginx/recipes/default.py
kokki/cookbooks/nginx/recipes/default.py
from kokki import * Package("nginx") Directory(env.config.nginx.log_dir, mode = 0755, owner = env.config.nginx.user, action = 'create') for nxscript in ('nxensite', 'nxdissite'): File("/usr/sbin/%s" % nxscript, content = Template("nginx/%s.j2" % nxscript), mode = 0755, owner = "root", group = "root") File("nginx.conf", path = "%s/nginx.conf" % env.config.nginx.dir, content = Template("nginx/nginx.conf.j2"), owner = "root", group = "root", mode = 0644) File("%s/sites-available/default" % env.config.nginx.dir, content = Template("nginx/default-site.j2"), owner = "root", group = "root", mode = 0644) Service("nginx", supports_status = True, supports_restart = True, supports_reload = True, action = "start", subscribes = [("reload", env.resources["File"]["nginx.conf"])])
from kokki import *

Package("nginx")

Directory(env.config.nginx.log_dir,
    mode = 0755,
    owner = env.config.nginx.user,
    action = 'create')

for nxscript in ('nxensite', 'nxdissite'):
    File("/usr/sbin/%s" % nxscript,
        content = Template("nginx/%s.j2" % nxscript),
        mode = 0755,
        owner = "root",
        group = "root")

File("nginx.conf",
    path = "%s/nginx.conf" % env.config.nginx.dir,
    content = Template("nginx/nginx.conf.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

File("%s/sites-available/default" % env.config.nginx.dir,
    content = Template("nginx/default-site.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

Service("nginx",
    supports_status = True,
    supports_restart = True,
    supports_reload = True,
    action = "start",
    subscribes = [("reload", env.resources["File"]["nginx.conf"])])

if "librato.silverline" in env.included_recipes:
    File("/etc/default/nginx",
        owner = "root",
        group = "root",
        mode = 0644,
        content = (
            "export LM_CONTAINER_NAME=nginx\n"
            "export LM_TAG_NAMES=nginx:webserver:frontend\n"
        ),
        notifies = [("restart", env.resources["Service"]["nginx"])])
Add silverline environment to nginx
Add silverline environment to nginx
Python
bsd-3-clause
samuel/kokki
from kokki import * Package("nginx") Directory(env.config.nginx.log_dir, mode = 0755, owner = env.config.nginx.user, action = 'create') for nxscript in ('nxensite', 'nxdissite'): File("/usr/sbin/%s" % nxscript, content = Template("nginx/%s.j2" % nxscript), mode = 0755, owner = "root", group = "root") File("nginx.conf", path = "%s/nginx.conf" % env.config.nginx.dir, content = Template("nginx/nginx.conf.j2"), owner = "root", group = "root", mode = 0644) File("%s/sites-available/default" % env.config.nginx.dir, content = Template("nginx/default-site.j2"), owner = "root", group = "root", mode = 0644) Service("nginx", supports_status = True, supports_restart = True, supports_reload = True, action = "start", subscribes = [("reload", env.resources["File"]["nginx.conf"])]) Add silverline environment to nginx
from kokki import *

Package("nginx")

Directory(env.config.nginx.log_dir,
    mode = 0755,
    owner = env.config.nginx.user,
    action = 'create')

for nxscript in ('nxensite', 'nxdissite'):
    File("/usr/sbin/%s" % nxscript,
        content = Template("nginx/%s.j2" % nxscript),
        mode = 0755,
        owner = "root",
        group = "root")

File("nginx.conf",
    path = "%s/nginx.conf" % env.config.nginx.dir,
    content = Template("nginx/nginx.conf.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

File("%s/sites-available/default" % env.config.nginx.dir,
    content = Template("nginx/default-site.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

Service("nginx",
    supports_status = True,
    supports_restart = True,
    supports_reload = True,
    action = "start",
    subscribes = [("reload", env.resources["File"]["nginx.conf"])])

if "librato.silverline" in env.included_recipes:
    File("/etc/default/nginx",
        owner = "root",
        group = "root",
        mode = 0644,
        content = (
            "export LM_CONTAINER_NAME=nginx\n"
            "export LM_TAG_NAMES=nginx:webserver:frontend\n"
        ),
        notifies = [("restart", env.resources["Service"]["nginx"])])
<commit_before> from kokki import * Package("nginx") Directory(env.config.nginx.log_dir, mode = 0755, owner = env.config.nginx.user, action = 'create') for nxscript in ('nxensite', 'nxdissite'): File("/usr/sbin/%s" % nxscript, content = Template("nginx/%s.j2" % nxscript), mode = 0755, owner = "root", group = "root") File("nginx.conf", path = "%s/nginx.conf" % env.config.nginx.dir, content = Template("nginx/nginx.conf.j2"), owner = "root", group = "root", mode = 0644) File("%s/sites-available/default" % env.config.nginx.dir, content = Template("nginx/default-site.j2"), owner = "root", group = "root", mode = 0644) Service("nginx", supports_status = True, supports_restart = True, supports_reload = True, action = "start", subscribes = [("reload", env.resources["File"]["nginx.conf"])]) <commit_msg>Add silverline environment to nginx<commit_after>
from kokki import *

Package("nginx")

Directory(env.config.nginx.log_dir,
    mode = 0755,
    owner = env.config.nginx.user,
    action = 'create')

for nxscript in ('nxensite', 'nxdissite'):
    File("/usr/sbin/%s" % nxscript,
        content = Template("nginx/%s.j2" % nxscript),
        mode = 0755,
        owner = "root",
        group = "root")

File("nginx.conf",
    path = "%s/nginx.conf" % env.config.nginx.dir,
    content = Template("nginx/nginx.conf.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

File("%s/sites-available/default" % env.config.nginx.dir,
    content = Template("nginx/default-site.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

Service("nginx",
    supports_status = True,
    supports_restart = True,
    supports_reload = True,
    action = "start",
    subscribes = [("reload", env.resources["File"]["nginx.conf"])])

if "librato.silverline" in env.included_recipes:
    File("/etc/default/nginx",
        owner = "root",
        group = "root",
        mode = 0644,
        content = (
            "export LM_CONTAINER_NAME=nginx\n"
            "export LM_TAG_NAMES=nginx:webserver:frontend\n"
        ),
        notifies = [("restart", env.resources["Service"]["nginx"])])
from kokki import *

Package("nginx")

Directory(env.config.nginx.log_dir,
    mode = 0755,
    owner = env.config.nginx.user,
    action = 'create')

for nxscript in ('nxensite', 'nxdissite'):
    File("/usr/sbin/%s" % nxscript,
        content = Template("nginx/%s.j2" % nxscript),
        mode = 0755,
        owner = "root",
        group = "root")

File("nginx.conf",
    path = "%s/nginx.conf" % env.config.nginx.dir,
    content = Template("nginx/nginx.conf.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

File("%s/sites-available/default" % env.config.nginx.dir,
    content = Template("nginx/default-site.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

Service("nginx",
    supports_status = True,
    supports_restart = True,
    supports_reload = True,
    action = "start",
    subscribes = [("reload", env.resources["File"]["nginx.conf"])])

Add silverline environment to nginx
from kokki import *

Package("nginx")

Directory(env.config.nginx.log_dir,
    mode = 0755,
    owner = env.config.nginx.user,
    action = 'create')

for nxscript in ('nxensite', 'nxdissite'):
    File("/usr/sbin/%s" % nxscript,
        content = Template("nginx/%s.j2" % nxscript),
        mode = 0755,
        owner = "root",
        group = "root")

File("nginx.conf",
    path = "%s/nginx.conf" % env.config.nginx.dir,
    content = Template("nginx/nginx.conf.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

File("%s/sites-available/default" % env.config.nginx.dir,
    content = Template("nginx/default-site.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

Service("nginx",
    supports_status = True,
    supports_restart = True,
    supports_reload = True,
    action = "start",
    subscribes = [("reload", env.resources["File"]["nginx.conf"])])

if "librato.silverline" in env.included_recipes:
    File("/etc/default/nginx",
        owner = "root",
        group = "root",
        mode = 0644,
        content = (
            "export LM_CONTAINER_NAME=nginx\n"
            "export LM_TAG_NAMES=nginx:webserver:frontend\n"
        ),
        notifies = [("restart", env.resources["Service"]["nginx"])])
<commit_before>
from kokki import *

Package("nginx")

Directory(env.config.nginx.log_dir,
    mode = 0755,
    owner = env.config.nginx.user,
    action = 'create')

for nxscript in ('nxensite', 'nxdissite'):
    File("/usr/sbin/%s" % nxscript,
        content = Template("nginx/%s.j2" % nxscript),
        mode = 0755,
        owner = "root",
        group = "root")

File("nginx.conf",
    path = "%s/nginx.conf" % env.config.nginx.dir,
    content = Template("nginx/nginx.conf.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

File("%s/sites-available/default" % env.config.nginx.dir,
    content = Template("nginx/default-site.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

Service("nginx",
    supports_status = True,
    supports_restart = True,
    supports_reload = True,
    action = "start",
    subscribes = [("reload", env.resources["File"]["nginx.conf"])])
<commit_msg>Add silverline environment to nginx<commit_after>
from kokki import *

Package("nginx")

Directory(env.config.nginx.log_dir,
    mode = 0755,
    owner = env.config.nginx.user,
    action = 'create')

for nxscript in ('nxensite', 'nxdissite'):
    File("/usr/sbin/%s" % nxscript,
        content = Template("nginx/%s.j2" % nxscript),
        mode = 0755,
        owner = "root",
        group = "root")

File("nginx.conf",
    path = "%s/nginx.conf" % env.config.nginx.dir,
    content = Template("nginx/nginx.conf.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

File("%s/sites-available/default" % env.config.nginx.dir,
    content = Template("nginx/default-site.j2"),
    owner = "root",
    group = "root",
    mode = 0644)

Service("nginx",
    supports_status = True,
    supports_restart = True,
    supports_reload = True,
    action = "start",
    subscribes = [("reload", env.resources["File"]["nginx.conf"])])

if "librato.silverline" in env.included_recipes:
    File("/etc/default/nginx",
        owner = "root",
        group = "root",
        mode = 0644,
        content = (
            "export LM_CONTAINER_NAME=nginx\n"
            "export LM_TAG_NAMES=nginx:webserver:frontend\n"
        ),
        notifies = [("restart", env.resources["Service"]["nginx"])])
c1889a71be161400a42ad6b7c72b2559a84f69bf
src/nodeconductor_assembly_waldur/invoices/tests/test_report.py
src/nodeconductor_assembly_waldur/invoices/tests/test_report.py
from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def test_invoice_items_are_properly_formatted(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) report = format_invoice_csv(invoice) self.assertEqual(2, len(report.splitlines()))
from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def setUp(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) self.invoice = invoice def test_invoice_items_are_properly_formatted(self): report = format_invoice_csv(self.invoice) lines = report.splitlines() self.assertEqual(2, len(lines)) expected_header = 'customer_uuid;customer_name;project_uuid;project_name;' \ 'invoice_uuid;invoice_number;invoice_year;invoice_month;' \ 'invoice_date;due_date;invoice_price;invoice_tax;' \ 'invoice_total;name;article_code;product_code;' \ 'price;tax;total;daily_price;start;end;usage_days' self.assertEqual(lines[0], expected_header)
Add unit test for report formatter
Add unit test for report formatter [WAL-905]
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def test_invoice_items_are_properly_formatted(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) report = format_invoice_csv(invoice) self.assertEqual(2, len(report.splitlines())) Add unit test for report formatter [WAL-905]
from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def setUp(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) self.invoice = invoice def test_invoice_items_are_properly_formatted(self): report = format_invoice_csv(self.invoice) lines = report.splitlines() self.assertEqual(2, len(lines)) expected_header = 'customer_uuid;customer_name;project_uuid;project_name;' \ 'invoice_uuid;invoice_number;invoice_year;invoice_month;' \ 'invoice_date;due_date;invoice_price;invoice_tax;' \ 'invoice_total;name;article_code;product_code;' \ 'price;tax;total;daily_price;start;end;usage_days' self.assertEqual(lines[0], expected_header)
<commit_before>from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def test_invoice_items_are_properly_formatted(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) report = format_invoice_csv(invoice) self.assertEqual(2, len(report.splitlines())) <commit_msg>Add unit test for report formatter [WAL-905]<commit_after>
from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def setUp(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) self.invoice = invoice def test_invoice_items_are_properly_formatted(self): report = format_invoice_csv(self.invoice) lines = report.splitlines() self.assertEqual(2, len(lines)) expected_header = 'customer_uuid;customer_name;project_uuid;project_name;' \ 'invoice_uuid;invoice_number;invoice_year;invoice_month;' \ 'invoice_date;due_date;invoice_price;invoice_tax;' \ 'invoice_total;name;article_code;product_code;' \ 'price;tax;total;daily_price;start;end;usage_days' self.assertEqual(lines[0], expected_header)
from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def test_invoice_items_are_properly_formatted(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) report = format_invoice_csv(invoice) self.assertEqual(2, len(report.splitlines())) Add unit test for report formatter [WAL-905]from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def setUp(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) self.invoice = invoice def test_invoice_items_are_properly_formatted(self): report = format_invoice_csv(self.invoice) lines = report.splitlines() self.assertEqual(2, len(lines)) expected_header = 'customer_uuid;customer_name;project_uuid;project_name;' \ 'invoice_uuid;invoice_number;invoice_year;invoice_month;' \ 'invoice_date;due_date;invoice_price;invoice_tax;' \ 'invoice_total;name;article_code;product_code;' \ 'price;tax;total;daily_price;start;end;usage_days' self.assertEqual(lines[0], expected_header)
<commit_before>from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def test_invoice_items_are_properly_formatted(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) report = format_invoice_csv(invoice) self.assertEqual(2, len(report.splitlines())) <commit_msg>Add unit test for report formatter [WAL-905]<commit_after>from django.test import TestCase from nodeconductor_assembly_waldur.invoices.tasks import format_invoice_csv from .. import models from . import fixtures class TestReportFormatter(TestCase): def setUp(self): fixture = fixtures.InvoiceFixture() package = fixture.openstack_package invoice = models.Invoice.objects.get(customer=package.tenant.service_project_link.project.customer) self.invoice = invoice def test_invoice_items_are_properly_formatted(self): report = format_invoice_csv(self.invoice) lines = report.splitlines() self.assertEqual(2, len(lines)) expected_header = 'customer_uuid;customer_name;project_uuid;project_name;' \ 'invoice_uuid;invoice_number;invoice_year;invoice_month;' \ 'invoice_date;due_date;invoice_price;invoice_tax;' \ 'invoice_total;name;article_code;product_code;' \ 'price;tax;total;daily_price;start;end;usage_days' self.assertEqual(lines[0], expected_header)
11c30f5dd765475a9f5f0f847f31c47af8c40a39
user_agent/device.py
user_agent/device.py
import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(open(f)) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(open())
import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(f) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(f)
Fix uses of file objects
Fix uses of file objects
Python
mit
lorien/user_agent
import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(open(f)) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(open()) Fix uses of file objects
import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(f) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(f)
<commit_before>import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(open(f)) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(open()) <commit_msg>Fix uses of file objects<commit_after>
import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(f) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(f)
import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(open(f)) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(open()) Fix uses of file objectsimport os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(f) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(f)
<commit_before>import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(open(f)) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(open()) <commit_msg>Fix uses of file objects<commit_after>import os.path import json PACKAGE_DIR = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(PACKAGE_DIR, 'data/smartphone_dev_id.json')) as f: SMARTPHONE_DEV_IDS = json.load(f) with open(os.path.join(PACKAGE_DIR, 'data/tablet_dev_id.json')) as f: TABLET_DEV_IDS = json.load(f)
2158edb92cba6c19fa258f19445191d0308c4153
utils/async_tasks.py
utils/async_tasks.py
from utils.redis_store import store def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. if elapsed_time > refresh_time: task_func.delay(store_key, *task_args, **task_kwargs) return output
from utils.redis_store import store from celery.signals import task_postrun, task_prerun def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. # If run_once=True, we only trigger the recompute if the task is not already running if elapsed_time > refresh_time: if run_once: # Check that it is not already running computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__) if store.get(computing_store_key): # Task is already running, don't trigger running again print('Skip computing data for {0}, already running'.format(store_key)) return output task_func.delay(store_key, *task_args, **task_kwargs) return output @task_prerun.connect() def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None): # Set computing key computing_store_key = 'computing-{0}'.format(task.name) store.set(computing_store_key, {'running': True}) @task_postrun.connect() def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None): # Delete computing key (if present) computing_store_key = 'computing-{0}'.format(task.name) store.delete(computing_store_key)
Add option to run async tasks only one at a time
Add option to run async tasks only one at a time

This is implemented with a simple lock-like mechanism using redis.
Python
agpl-3.0
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
from utils.redis_store import store


def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60):
    # Get task results previously stored in store
    output, elapsed_time = store.get(store_key, include_elapsed_time=True)

    # If there are no previously stored results (elapsed_time will be a magically big number) or
    # if the previously stored results are older than refresh_time, then we trigger recompute of the
    # task so that results are ready for next load.
    if elapsed_time > refresh_time:
        task_func.delay(store_key, *task_args, **task_kwargs)

    return output
Add option to run async tasks only one at a time

This is implemented with a simple lock-like mechanism using redis.
from utils.redis_store import store from celery.signals import task_postrun, task_prerun def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. # If run_once=True, we only trigger the recompute if the task is not already running if elapsed_time > refresh_time: if run_once: # Check that it is not already running computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__) if store.get(computing_store_key): # Task is already running, don't trigger running again print('Skip computing data for {0}, already running'.format(store_key)) return output task_func.delay(store_key, *task_args, **task_kwargs) return output @task_prerun.connect() def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None): # Set computing key computing_store_key = 'computing-{0}'.format(task.name) store.set(computing_store_key, {'running': True}) @task_postrun.connect() def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None): # Delete computing key (if present) computing_store_key = 'computing-{0}'.format(task.name) store.delete(computing_store_key)
<commit_before>from utils.redis_store import store


def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60):
    # Get task results previously stored in store
    output, elapsed_time = store.get(store_key, include_elapsed_time=True)

    # If there are no previously stored results (elapsed_time will be a magically big number) or
    # if the previously stored results are older than refresh_time, then we trigger recompute of the
    # task so that results are ready for next load.
    if elapsed_time > refresh_time:
        task_func.delay(store_key, *task_args, **task_kwargs)

    return output
<commit_msg>Add option to run async tasks only one at a time

This is implemented with a simple lock-like mechanism using redis.<commit_after>
from utils.redis_store import store from celery.signals import task_postrun, task_prerun def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. # If run_once=True, we only trigger the recompute if the task is not already running if elapsed_time > refresh_time: if run_once: # Check that it is not already running computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__) if store.get(computing_store_key): # Task is already running, don't trigger running again print('Skip computing data for {0}, already running'.format(store_key)) return output task_func.delay(store_key, *task_args, **task_kwargs) return output @task_prerun.connect() def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None): # Set computing key computing_store_key = 'computing-{0}'.format(task.name) store.set(computing_store_key, {'running': True}) @task_postrun.connect() def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None): # Delete computing key (if present) computing_store_key = 'computing-{0}'.format(task.name) store.delete(computing_store_key)
from utils.redis_store import store


def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60):
    # Get task results previously stored in store
    output, elapsed_time = store.get(store_key, include_elapsed_time=True)

    # If there are no previously stored results (elapsed_time will be a magically big number) or
    # if the previously stored results are older than refresh_time, then we trigger recompute of the
    # task so that results are ready for next load.
    if elapsed_time > refresh_time:
        task_func.delay(store_key, *task_args, **task_kwargs)

    return output
Add option to run async tasks only one at a time

This is implemented with a simple lock-like mechanism using redis.from utils.redis_store import store
from celery.signals import task_postrun, task_prerun


def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True):
    # Get task results previously stored in store
    output, elapsed_time = store.get(store_key, include_elapsed_time=True)

    # If there are no previously stored results (elapsed_time will be a magically big number) or
    # if the previously stored results are older than refresh_time, then we trigger recompute of the
    # task so that results are ready for next load.
    # If run_once=True, we only trigger the recompute if the task is not already running
    if elapsed_time > refresh_time:
        if run_once:
            # Check that it is not already running
            computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__)
            if store.get(computing_store_key):
                # Task is already running, don't trigger running again
                print('Skip computing data for {0}, already running'.format(store_key))
                return output

        task_func.delay(store_key, *task_args, **task_kwargs)

    return output


@task_prerun.connect()
def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None):
    # Set computing key
    computing_store_key = 'computing-{0}'.format(task.name)
    store.set(computing_store_key, {'running': True})


@task_postrun.connect()
def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None):
    # Delete computing key (if present)
    computing_store_key = 'computing-{0}'.format(task.name)
    store.delete(computing_store_key)
<commit_before>from utils.redis_store import store


def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60):
    # Get task results previously stored in store
    output, elapsed_time = store.get(store_key, include_elapsed_time=True)

    # If there are no previously stored results (elapsed_time will be a magically big number) or
    # if the previously stored results are older than refresh_time, then we trigger recompute of the
    # task so that results are ready for next load.
    if elapsed_time > refresh_time:
        task_func.delay(store_key, *task_args, **task_kwargs)

    return output
<commit_msg>Add option to run async tasks only one at a time

This is implemented with a simple lock-like mechanism using redis.<commit_after>from utils.redis_store import store
from celery.signals import task_postrun, task_prerun


def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True):
    # Get task results previously stored in store
    output, elapsed_time = store.get(store_key, include_elapsed_time=True)

    # If there are no previously stored results (elapsed_time will be a magically big number) or
    # if the previously stored results are older than refresh_time, then we trigger recompute of the
    # task so that results are ready for next load.
    # If run_once=True, we only trigger the recompute if the task is not already running
    if elapsed_time > refresh_time:
        if run_once:
            # Check that it is not already running
            computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__)
            if store.get(computing_store_key):
                # Task is already running, don't trigger running again
                print('Skip computing data for {0}, already running'.format(store_key))
                return output

        task_func.delay(store_key, *task_args, **task_kwargs)

    return output


@task_prerun.connect()
def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None):
    # Set computing key
    computing_store_key = 'computing-{0}'.format(task.name)
    store.set(computing_store_key, {'running': True})


@task_postrun.connect()
def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None):
    # Delete computing key (if present)
    computing_store_key = 'computing-{0}'.format(task.name)
    store.delete(computing_store_key)
43e86a3ac5f63c702ce409c45e3aaaac60990fe9
python/ensure_haddock_coverage.py
python/ensure_haddock_coverage.py
#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main()
#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if len(stats) < 8: print(("Expecting at least 8 Haddock-covered modules.\n" "Possibly Haddock output nothing, or number of modules " "has decreased.\nIf number of modules has decreased, edit " "ensure_haddock_coverage.py to be up to date."), file=sys.stderr) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main()
Check for number of modules in haddock-checking script
Check for number of modules in haddock-checking script
Python
mit
gibiansky/jupyter-haskell
#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main() Check for number of modules in haddock-checking script
#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if len(stats) < 8: print(("Expecting at least 8 Haddock-covered modules.\n" "Possibly Haddock output nothing, or number of modules " "has decreased.\nIf number of modules has decreased, edit " "ensure_haddock_coverage.py to be up to date."), file=sys.stderr) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main() <commit_msg>Check for number of modules in haddock-checking script<commit_after>
#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if len(stats) < 8: print(("Expecting at least 8 Haddock-covered modules.\n" "Possibly Haddock output nothing, or number of modules " "has decreased.\nIf number of modules has decreased, edit " "ensure_haddock_coverage.py to be up to date."), file=sys.stderr) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main()
#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main() Check for number of modules in haddock-checking script#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if len(stats) < 8: print(("Expecting at least 8 Haddock-covered modules.\n" "Possibly Haddock output nothing, or number of modules " "has decreased.\nIf number of modules has decreased, edit " "ensure_haddock_coverage.py to be up to date."), file=sys.stderr) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main() <commit_msg>Check for number of modules in haddock-checking script<commit_after>#!/usr/bin/env python """ Tiny utility script to check that coverage statistics output by stack haddock are all 100%. """ import sys import re def main(): """Entry point to ensure-haddock-coverage.py.""" # Verify that the number of arguments is correct. if len(sys.argv) != 2: print("Usage: ./ensure-haddock-coverage.py $HADDOCK_OUT", file=sys.stderr) sys.exit(1) # Read contents of input file. filename = sys.argv[1] with open(filename, "r") as handle: contents = handle.read() # Find all coverage statistics. stats = [] for line in contents.split("\n"): pat = " ([0-9]*)% \\([ 0-9]* / [ 0-9]*\\) in '([a-zA-Z.0-9]*)'" match = re.search(pat, line) if match is not None: stats.append((int(match.group(1)), match.group(2))) insufficient_coverage = False for coverage, name in stats: if coverage != 100: print("Insufficient Haddock Coverage on {}: {}" .format(name, coverage)) insufficient_coverage = True if len(stats) < 8: print(("Expecting at least 8 Haddock-covered modules.\n" "Possibly Haddock output nothing, or number of modules " "has decreased.\nIf number of modules has decreased, edit " "ensure_haddock_coverage.py to be up to date."), file=sys.stderr) insufficient_coverage = True if insufficient_coverage: sys.exit(1) if __name__ == '__main__': main()
8b9ebbad9e87af3f56570ba3c32dcdb2d7ca4a39
django_iceberg/models/base_models.py
django_iceberg/models/base_models.py
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: app_label = "django_iceberg" abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError
Add app_label for django < 1.7 compatibility
Add app_label for django < 1.7 compatibility
Python
mit
izberg-marketplace/django-izberg,izberg-marketplace/django-izberg,Iceberg-Marketplace/django-iceberg,Iceberg-Marketplace/django-iceberg
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod")


class IcebergBaseModel(models.Model):
    ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage"
    ENVIRONMENT_CHOICES = (
        (ICEBERG_PROD, _('Iceberg - Prod')),
        (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd
        (ICEBERG_SANDBOX, _('Iceberg - Sandbox')),
        (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')),
    )

    environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20)
    iceberg_id = models.PositiveIntegerField(blank=True, null=True)
    last_updated = models.DateTimeField(auto_now = True)

    API_RESOURCE_NAME = None

    class Meta:
        abstract = True

    def iceberg_sync(self, api_handler):
        """
        Sync the local object from Iceberg version
        """
        raise NotImplementedErrorAdd app_label for django < 1.7 compatibility
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: app_label = "django_iceberg" abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError
<commit_before>
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod")


class IcebergBaseModel(models.Model):
    ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage"
    ENVIRONMENT_CHOICES = (
        (ICEBERG_PROD, _('Iceberg - Prod')),
        (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd
        (ICEBERG_SANDBOX, _('Iceberg - Sandbox')),
        (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')),
    )

    environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20)
    iceberg_id = models.PositiveIntegerField(blank=True, null=True)
    last_updated = models.DateTimeField(auto_now = True)

    API_RESOURCE_NAME = None

    class Meta:
        abstract = True

    def iceberg_sync(self, api_handler):
        """
        Sync the local object from Iceberg version
        """
        raise NotImplementedError<commit_msg>Add app_label for django < 1.7 compatibility<commit_after>
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: app_label = "django_iceberg" abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod")


class IcebergBaseModel(models.Model):
    ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage"
    ENVIRONMENT_CHOICES = (
        (ICEBERG_PROD, _('Iceberg - Prod')),
        (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd
        (ICEBERG_SANDBOX, _('Iceberg - Sandbox')),
        (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')),
    )

    environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20)
    iceberg_id = models.PositiveIntegerField(blank=True, null=True)
    last_updated = models.DateTimeField(auto_now = True)

    API_RESOURCE_NAME = None

    class Meta:
        abstract = True

    def iceberg_sync(self, api_handler):
        """
        Sync the local object from Iceberg version
        """
        raise NotImplementedErrorAdd app_label for django < 1.7 compatibility
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod")


class IcebergBaseModel(models.Model):
    ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage"
    ENVIRONMENT_CHOICES = (
        (ICEBERG_PROD, _('Iceberg - Prod')),
        (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd
        (ICEBERG_SANDBOX, _('Iceberg - Sandbox')),
        (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')),
    )

    environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20)
    iceberg_id = models.PositiveIntegerField(blank=True, null=True)
    last_updated = models.DateTimeField(auto_now = True)

    API_RESOURCE_NAME = None

    class Meta:
        app_label = "django_iceberg"
        abstract = True

    def iceberg_sync(self, api_handler):
        """
        Sync the local object from Iceberg version
        """
        raise NotImplementedError
<commit_before>
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod")


class IcebergBaseModel(models.Model):
    ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage"
    ENVIRONMENT_CHOICES = (
        (ICEBERG_PROD, _('Iceberg - Prod')),
        (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd
        (ICEBERG_SANDBOX, _('Iceberg - Sandbox')),
        (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')),
    )

    environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20)
    iceberg_id = models.PositiveIntegerField(blank=True, null=True)
    last_updated = models.DateTimeField(auto_now = True)

    API_RESOURCE_NAME = None

    class Meta:
        abstract = True

    def iceberg_sync(self, api_handler):
        """
        Sync the local object from Iceberg version
        """
        raise NotImplementedError<commit_msg>Add app_label for django < 1.7 compatibility<commit_after>
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod")


class IcebergBaseModel(models.Model):
    ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage"
    ENVIRONMENT_CHOICES = (
        (ICEBERG_PROD, _('Iceberg - Prod')),
        (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd
        (ICEBERG_SANDBOX, _('Iceberg - Sandbox')),
        (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')),
    )

    environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20)
    iceberg_id = models.PositiveIntegerField(blank=True, null=True)
    last_updated = models.DateTimeField(auto_now = True)

    API_RESOURCE_NAME = None

    class Meta:
        app_label = "django_iceberg"
        abstract = True

    def iceberg_sync(self, api_handler):
        """
        Sync the local object from Iceberg version
        """
        raise NotImplementedError
9535234db263f0155f515236457ee7ba5e9e1e0e
punic/cartfile.py
punic/cartfile.py
from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string)
from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] lines = [line.strip() for line in lines] lines = [line for line in lines if line] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string)
Fix processing of empty lines.
Fix processing of empty lines.
Python
mit
schwa/punic
from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string) Fix processing of empty lines.
from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] lines = [line.strip() for line in lines] lines = [line for line in lines if line] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string)
<commit_before>from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string) <commit_msg>Fix processing of empty lines.<commit_after>
from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] lines = [line.strip() for line in lines] lines = [line for line in lines if line] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string)
from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string) Fix processing of empty lines.from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] lines = [line.strip() for line in lines] lines = [line for line in lines if line] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string)
<commit_before>from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string) <commit_msg>Fix processing of empty lines.<commit_after>from __future__ import division, absolute_import, print_function __all__ = ['Cartfile'] import re from pathlib2 import Path from .basic_types import * from .errors import * class Cartfile(object): def __init__(self, specifications=None, overrides=None): self.specifications = specifications if specifications else [] self.overrides = overrides def read(self, source): # type: (Path) if isinstance(source, Path): if not source.exists(): raise CartfileNotFound(path=source) source = source.open().read() # TODO: This is of course super feeble parsing. URLs with #s in them can break for example lines = [line.rstrip() for line in source.splitlines()] lines = [re.sub(r'\#.+', '', line) for line in lines] lines = [line.strip() for line in lines] lines = [line for line in lines if line] self.specifications = [Specification.cartfile_string(line, self.overrides) for line in lines] def write(self, destination): # type: (File) strings = [str(specification) for specification in self.specifications] string = u'\n'.join(sorted(strings)) + '\n' destination.write(string)
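Editor's note on the cartfile record above: the fix is a standard line-cleaning pipeline — strip comments, trim whitespace, and drop whatever ends up empty — applied before the real parser ever sees a line. The same idiom in isolation; the function name is illustrative:

import re

def clean_lines(text):
    """Strip '#' comments and whitespace, then drop lines left empty."""
    lines = (re.sub(r'#.*', '', line) for line in text.splitlines())
    return [line.strip() for line in lines if line.strip()]

# Comment-only and blank lines disappear entirely:
assert clean_lines("a 1\n# comment\n\n  b 2  # trailing\n") == ["a 1", "b 2"]

Without the final filter, a comment-only line survives as an empty string and reaches the specification parser — exactly the bug the commit fixes.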
0c186d8e0fb5bd7170ec55943e546f1e4e335839
masters/master.tryserver.chromium/master_site_config.py
masters/master.tryserver.chromium/master_site_config.py
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' last_good_blink_url = 'http://blink-status.appspot.com/lkgr' svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = None last_good_blink_url = None svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
Remove last good URL for tryserver.chromium
Remove last good URL for tryserver.chromium The expected behavior of this change is that the tryserver master no longer tries to resolve revisions to LKGR when trying jobs. BUG=372499, 386667 Review URL: https://codereview.chromium.org/394653002 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@283469 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' last_good_blink_url = 'http://blink-status.appspot.com/lkgr' svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/' Remove last good URL for tryserver.chromium The expected behavior of this change is that the tryserver master no longer tries to resolve revisions to LKGR when trying jobs. BUG=372499, 386667 Review URL: https://codereview.chromium.org/394653002 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@283469 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = None last_good_blink_url = None svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' last_good_blink_url = 'http://blink-status.appspot.com/lkgr' svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/' <commit_msg>Remove last good URL for tryserver.chromium The expected behavior of this change is that the tryserver master no longer tries to resolve revisions to LKGR when trying jobs. BUG=372499, 386667 Review URL: https://codereview.chromium.org/394653002 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@283469 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = None last_good_blink_url = None svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' last_good_blink_url = 'http://blink-status.appspot.com/lkgr' svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/' Remove last good URL for tryserver.chromium The expected behavior of this change is that the tryserver master no longer tries to resolve revisions to LKGR when trying jobs. BUG=372499, 386667 Review URL: https://codereview.chromium.org/394653002 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@283469 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = None last_good_blink_url = None svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = base_app_url + '/lkgr' last_good_blink_url = 'http://blink-status.appspot.com/lkgr' svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/' <commit_msg>Remove last good URL for tryserver.chromium The expected behavior of this change is that the tryserver master no longer tries to resolve revisions to LKGR when trying jobs. BUG=372499, 386667 Review URL: https://codereview.chromium.org/394653002 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@283469 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ActiveMaster definition.""" from config_bootstrap import Master class TryServer(Master.Master4): project_name = 'Chromium Try Server' master_port = 8028 slave_port = 8128 master_port_alt = 8228 try_job_port = 8328 # Select tree status urls and codereview location. reply_to = 'chrome-troopers+tryserver@google.com' base_app_url = 'https://chromium-status.appspot.com' tree_status_url = base_app_url + '/status' store_revisions_url = base_app_url + '/revisions' last_good_url = None last_good_blink_url = None svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try' buildbot_url = 'http://build.chromium.org/p/tryserver.chromium/'
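Editor's note on the tryserver record above: the change disables LKGR lookups by feature-toggling the URLs to None rather than deleting the attributes, which keeps the config shape stable for code that reads it. A hedged sketch of the kind of guard a consumer of such a setting typically needs — the function and its behavior are illustrative assumptions, not the actual buildbot consumer:

import urllib.request

def resolve_revision(requested, last_good_url=None):
    """Use the requested revision; fall back to LKGR only when a URL is configured."""
    if requested is not None:
        return requested
    if last_good_url is None:
        return None  # no LKGR configured: let the builder pick HEAD (assumed behavior)
    with urllib.request.urlopen(last_good_url) as resp:
        return resp.read().decode().strip()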
94e344b48161e20fec5023fbeea4a14cdc736158
pynuts/filters.py
pynuts/filters.py
# -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data)
# -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) else: return u'∅' elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data)
Return an "empty sequence" character instead of an empty list for empty select fields
Return an "empty sequence" character instead of an empty list for empty select fields
Python
bsd-3-clause
Kozea/Pynuts,Kozea/Pynuts,Kozea/Pynuts
# -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data) Return an "empty sequence" character instead of an empty list for empty select fields
# -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) else: return u'∅' elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data)
<commit_before># -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data) <commit_msg>Return an "empty sequence" character instead of an empty list for empty select fields<commit_after>
# -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) else: return u'∅' elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data)
# -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data) Return an "empty sequence" character instead of an empty list for empty select fields# -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) else: return u'∅' elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data)
<commit_before># -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data) <commit_msg>Return an "empty sequence" character instead of an empty list for empty select fields<commit_after># -*- coding: utf-8 -*- """Jinja environment filters for Pynuts.""" from flask import escape from flask.ext.wtf import ( QuerySelectField, QuerySelectMultipleField, BooleanField) def data(field): """Field data beautifier. QuerySelectMultipleField Renders comma-separated data. QuerySelectField Renders the selected value. BooleanField Renders '✓' or '✕' Example: .. sourcecode:: html+jinja <dd>{{ field | data }}</dd> """ if isinstance(field, QuerySelectMultipleField): if field.data: return escape( u', '.join(field.get_label(data) for data in field.data)) else: return u'∅' elif isinstance(field, QuerySelectField): if field.data: return escape(field.get_label(field.data)) elif isinstance(field, BooleanField): return u'✓' if field.data else u'✕' return escape(field.data)
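Editor's note on the filters record above: the fix makes the empty case explicit (returning '∅') instead of letting an empty list fall through to escape(field.data) and render as '[]'. The same dispatch-on-type pattern, stripped of the WTForms dependency; the type handling and names are illustrative:

from markupsafe import escape

def pretty(value):
    """Render a value for display: join lists, mark empty ones, tick booleans."""
    if isinstance(value, bool):
        return u'✓' if value else u'✕'
    if isinstance(value, (list, tuple)):
        return escape(u', '.join(map(str, value))) if value else u'∅'
    return escape(value)

assert pretty([]) == u'∅'
assert pretty(['a', 'b']) == 'a, b'
assert pretty(True) == u'✓'

Note the bool check comes first: bool is a subclass of int, and in filters like this the most specific types should be dispatched before the general fallback.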
7111860577c921dc3d1602fa16b22ddfb45b69ed
lots/migrations/0002_auto_20170717_2115.py
lots/migrations/0002_auto_20170717_2115.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data) ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations from lots.models import LotType def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() def remove_data(apps, schema_editor): LotType.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data, remove_data) ]
Add reverse action to importing default lot types
Add reverse action to importing default lot types
Python
mpl-2.0
jackbravo/condorest-django,jackbravo/condorest-django,jackbravo/condorest-django
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data) ] Add reverse action to importing default lot types
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations from lots.models import LotType def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() def remove_data(apps, schema_editor): LotType.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data, remove_data) ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data) ] <commit_msg>Add reverse action to importing default lot types<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations from lots.models import LotType def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() def remove_data(apps, schema_editor): LotType.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data, remove_data) ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data) ] Add reverse action to importing default lot types# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations from lots.models import LotType def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() def remove_data(apps, schema_editor): LotType.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data, remove_data) ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data) ] <commit_msg>Add reverse action to importing default lot types<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-18 02:15 from __future__ import unicode_literals from django.db import models, migrations from lots.models import LotType def load_data(apps, schema_editor): LotType = apps.get_model("lots", "LotType") LotType(name="Casa").save() LotType(name="Lote").save() def remove_data(apps, schema_editor): LotType.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('lots', '0001_initial'), ] operations = [ migrations.RunPython(load_data, remove_data) ]
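Editor's note on the migration record above: RunPython takes an optional second callable, which is what makes `migrate lots 0001` able to undo the data load. One wrinkle in the committed version is that remove_data uses the directly imported LotType model rather than the historical model; the safer idiom inside migrations is to fetch the model from `apps` in both directions. A sketch of that idiom — the app and model names follow the record, the migration module itself is illustrative:

from django.db import migrations

def load_data(apps, schema_editor):
    LotType = apps.get_model("lots", "LotType")
    LotType.objects.bulk_create([LotType(name="Casa"), LotType(name="Lote")])

def remove_data(apps, schema_editor):
    # Historical model here too, so the reverse works even after later schema changes.
    LotType = apps.get_model("lots", "LotType")
    LotType.objects.filter(name__in=["Casa", "Lote"]).delete()

class Migration(migrations.Migration):
    dependencies = [("lots", "0001_initial")]
    operations = [migrations.RunPython(load_data, remove_data)]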
78f55cf57d88378e59cf1399d7ef80082d3f9555
bluebottle/partners/serializers.py
bluebottle/partners/serializers.py
from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer as BaseProjectPreviewSerializer from rest_framework import serializers # This is a bit of a hack. We have an existing ProjectPreviewSerializer in /bb_projects/serializers. # However, that serializer depends on properties calculated in the ProjectPreview view. Therefore, we # cannot re-use the serializer. The serialzier below is the same, except it has the fields "people_requested" # and "people_registered" removed. from bluebottle.utils.serializer_dispatcher import get_serializer_class class ProjectPreviewSerializer(BaseProjectPreviewSerializer): task_count = serializers.IntegerField(source='task_count') owner = get_serializer_class('AUTH_USER_MODEL', 'preview')(source='owner') partner = serializers.SlugRelatedField(slug_field='slug', source='partner_organization') is_funding = serializers.Field() class Meta(BaseProjectPreviewSerializer): model = BaseProjectPreviewSerializer.Meta.model fields = ('id', 'title', 'image', 'status', 'pitch', 'country', 'task_count', 'allow_overfunding', 'latitude', 'longitude', 'is_campaign', 'amount_asked', 'amount_donated', 'amount_needed', 'amount_extra', 'deadline', 'status', 'owner', 'partner', 'is_funding') class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image')
from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer from rest_framework import serializers class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image')
Fix partner project serializer. (The comment was outdated btw)
Fix partner project serializer. (The comment was outdated btw)
Python
bsd-3-clause
onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle
from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer as BaseProjectPreviewSerializer from rest_framework import serializers # This is a bit of a hack. We have an existing ProjectPreviewSerializer in /bb_projects/serializers. # However, that serializer depends on properties calculated in the ProjectPreview view. Therefore, we # cannot re-use the serializer. The serialzier below is the same, except it has the fields "people_requested" # and "people_registered" removed. from bluebottle.utils.serializer_dispatcher import get_serializer_class class ProjectPreviewSerializer(BaseProjectPreviewSerializer): task_count = serializers.IntegerField(source='task_count') owner = get_serializer_class('AUTH_USER_MODEL', 'preview')(source='owner') partner = serializers.SlugRelatedField(slug_field='slug', source='partner_organization') is_funding = serializers.Field() class Meta(BaseProjectPreviewSerializer): model = BaseProjectPreviewSerializer.Meta.model fields = ('id', 'title', 'image', 'status', 'pitch', 'country', 'task_count', 'allow_overfunding', 'latitude', 'longitude', 'is_campaign', 'amount_asked', 'amount_donated', 'amount_needed', 'amount_extra', 'deadline', 'status', 'owner', 'partner', 'is_funding') class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image') Fix partner project serializer. (The comment was outdated btw)
from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer from rest_framework import serializers class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image')
<commit_before>from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer as BaseProjectPreviewSerializer from rest_framework import serializers # This is a bit of a hack. We have an existing ProjectPreviewSerializer in /bb_projects/serializers. # However, that serializer depends on properties calculated in the ProjectPreview view. Therefore, we # cannot re-use the serializer. The serialzier below is the same, except it has the fields "people_requested" # and "people_registered" removed. from bluebottle.utils.serializer_dispatcher import get_serializer_class class ProjectPreviewSerializer(BaseProjectPreviewSerializer): task_count = serializers.IntegerField(source='task_count') owner = get_serializer_class('AUTH_USER_MODEL', 'preview')(source='owner') partner = serializers.SlugRelatedField(slug_field='slug', source='partner_organization') is_funding = serializers.Field() class Meta(BaseProjectPreviewSerializer): model = BaseProjectPreviewSerializer.Meta.model fields = ('id', 'title', 'image', 'status', 'pitch', 'country', 'task_count', 'allow_overfunding', 'latitude', 'longitude', 'is_campaign', 'amount_asked', 'amount_donated', 'amount_needed', 'amount_extra', 'deadline', 'status', 'owner', 'partner', 'is_funding') class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image') <commit_msg>Fix partner project serializer. (The comment was outdated btw)<commit_after>
from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer from rest_framework import serializers class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image')
from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer as BaseProjectPreviewSerializer from rest_framework import serializers # This is a bit of a hack. We have an existing ProjectPreviewSerializer in /bb_projects/serializers. # However, that serializer depends on properties calculated in the ProjectPreview view. Therefore, we # cannot re-use the serializer. The serialzier below is the same, except it has the fields "people_requested" # and "people_registered" removed. from bluebottle.utils.serializer_dispatcher import get_serializer_class class ProjectPreviewSerializer(BaseProjectPreviewSerializer): task_count = serializers.IntegerField(source='task_count') owner = get_serializer_class('AUTH_USER_MODEL', 'preview')(source='owner') partner = serializers.SlugRelatedField(slug_field='slug', source='partner_organization') is_funding = serializers.Field() class Meta(BaseProjectPreviewSerializer): model = BaseProjectPreviewSerializer.Meta.model fields = ('id', 'title', 'image', 'status', 'pitch', 'country', 'task_count', 'allow_overfunding', 'latitude', 'longitude', 'is_campaign', 'amount_asked', 'amount_donated', 'amount_needed', 'amount_extra', 'deadline', 'status', 'owner', 'partner', 'is_funding') class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image') Fix partner project serializer. (The comment was outdated btw)from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer from rest_framework import serializers class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image')
<commit_before>from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer as BaseProjectPreviewSerializer from rest_framework import serializers # This is a bit of a hack. We have an existing ProjectPreviewSerializer in /bb_projects/serializers. # However, that serializer depends on properties calculated in the ProjectPreview view. Therefore, we # cannot re-use the serializer. The serialzier below is the same, except it has the fields "people_requested" # and "people_registered" removed. from bluebottle.utils.serializer_dispatcher import get_serializer_class class ProjectPreviewSerializer(BaseProjectPreviewSerializer): task_count = serializers.IntegerField(source='task_count') owner = get_serializer_class('AUTH_USER_MODEL', 'preview')(source='owner') partner = serializers.SlugRelatedField(slug_field='slug', source='partner_organization') is_funding = serializers.Field() class Meta(BaseProjectPreviewSerializer): model = BaseProjectPreviewSerializer.Meta.model fields = ('id', 'title', 'image', 'status', 'pitch', 'country', 'task_count', 'allow_overfunding', 'latitude', 'longitude', 'is_campaign', 'amount_asked', 'amount_donated', 'amount_needed', 'amount_extra', 'deadline', 'status', 'owner', 'partner', 'is_funding') class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image') <commit_msg>Fix partner project serializer. (The comment was outdated btw)<commit_after>from bluebottle.bluebottle_drf2.serializers import ImageSerializer from bluebottle.projects.models import PartnerOrganization from bluebottle.projects.serializers import ProjectPreviewSerializer from rest_framework import serializers class PartnerOrganizationPreviewSerializer(serializers.ModelSerializer): id = serializers.CharField(source='slug', read_only=True) class Meta: model = PartnerOrganization fields = ('id', 'name', ) class PartnerOrganizationSerializer(PartnerOrganizationPreviewSerializer): projects = ProjectPreviewSerializer(source='projects') image = ImageSerializer(required=False) description = serializers.CharField(source='description') class Meta: model = PartnerOrganization fields = ('id', 'name', 'projects', 'description', 'image')
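Editor's note on the serializer record above: the commit deletes a copied-and-drifted serializer in favor of the shared ProjectPreviewSerializer. When a variant really is needed, DRF lets a subclass remove an inherited field by shadowing it with None, which keeps a single source of truth instead of a forked copy. A minimal sketch with illustrative field names:

from rest_framework import serializers

class BaseProjectSerializer(serializers.Serializer):
    id = serializers.IntegerField()
    title = serializers.CharField()
    people_requested = serializers.IntegerField()

class ProjectPreview(BaseProjectSerializer):
    """Same as the base, minus a field the preview view cannot compute."""
    people_requested = None  # DRF-documented way to drop an inherited field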
b973a1686f269044e670704b56c07ca79336c29c
mythril/laser/ethereum/strategy/basic.py
mythril/laser/ethereum/strategy/basic.py
class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
""" This module implements basic symbolic execution search strategies """ class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
Add documentation and fix pop
Add documentation and fix pop
Python
mit
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() Add documentation and fix pop
""" This module implements basic symbolic execution search strategies """ class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
<commit_before>class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() <commit_msg>Add documentation and fix pop<commit_after>
""" This module implements basic symbolic execution search strategies """ class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() Add documentation and fix pop""" This module implements basic symbolic execution search strategies """ class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
<commit_before>class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() <commit_msg>Add documentation and fix pop<commit_after>""" This module implements basic symbolic execution search strategies """ class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
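Editor's note on the mythril record above: the one-character change from pop(0) to pop() is what turns the strategy from breadth-first into depth-first — with new states appended to the end of the work list, taking from the front gives FIFO (BFS) order while taking from the back gives LIFO (DFS) order. A tiny worklist sketch showing both orders on the same tree, independent of mythril's state objects:

def traverse(children, root, dfs=True):
    """Worklist traversal: pop() gives DFS, pop(0) gives BFS."""
    work_list, visited = [root], []
    while work_list:
        node = work_list.pop() if dfs else work_list.pop(0)
        visited.append(node)
        work_list.extend(children.get(node, []))
    return visited

tree = {"a": ["b", "c"], "b": ["d"], "c": ["e"]}
assert traverse(tree, "a", dfs=True) == ["a", "c", "e", "b", "d"]
assert traverse(tree, "a", dfs=False) == ["a", "b", "c", "d", "e"]

(For large frontiers, pop(0) on a list is O(n); a collections.deque with popleft() is the usual fix, though the record's code keeps a plain list.)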
052c0cfd1ce21b36c9c9a44193e3a9c89ca871f1
ciscripts/coverage/bii/coverage.py
ciscripts/coverage/bii/coverage.py
# /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") try: os.rename(bii_dir, os.path.join(os.getcwd(), "bii")) except OSError as error: if error.errno != errno.ENOENT: raise error def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv)
# /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager def _move_ignore_enoent(src, dst): """Move src to dst, ignoring ENOENT.""" try: os.rename(src, dst) except OSError as error: if error.errno != errno.ENOENT: raise error @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") _move_ignore_enoent(bii_dir, os.path.join(os.getcwd(), "bii")) try: yield finally: _move_ignore_enoent(os.path.join(os.getcwd(), "bii"), bii_dir) def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv)
Make _bii_deps_in_place actually behave like a context manager
bii: Make _bii_deps_in_place actually behave like a context manager
Python
mit
polysquare/polysquare-ci-scripts,polysquare/polysquare-ci-scripts
# /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") try: os.rename(bii_dir, os.path.join(os.getcwd(), "bii")) except OSError as error: if error.errno != errno.ENOENT: raise error def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv) bii: Make _bii_deps_in_place actually behave like a context manager
# /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager def _move_ignore_enoent(src, dst): """Move src to dst, ignoring ENOENT.""" try: os.rename(src, dst) except OSError as error: if error.errno != errno.ENOENT: raise error @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") _move_ignore_enoent(bii_dir, os.path.join(os.getcwd(), "bii")) try: yield finally: _move_ignore_enoent(os.path.join(os.getcwd(), "bii"), bii_dir) def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv)
<commit_before># /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") try: os.rename(bii_dir, os.path.join(os.getcwd(), "bii")) except OSError as error: if error.errno != errno.ENOENT: raise error def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv) <commit_msg>bii: Make _bii_deps_in_place actually behave like a context manager<commit_after>
# /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager def _move_ignore_enoent(src, dst): """Move src to dst, ignoring ENOENT.""" try: os.rename(src, dst) except OSError as error: if error.errno != errno.ENOENT: raise error @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") _move_ignore_enoent(bii_dir, os.path.join(os.getcwd(), "bii")) try: yield finally: _move_ignore_enoent(os.path.join(os.getcwd(), "bii"), bii_dir) def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv)
# /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") try: os.rename(bii_dir, os.path.join(os.getcwd(), "bii")) except OSError as error: if error.errno != errno.ENOENT: raise error def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv) bii: Make _bii_deps_in_place actually behave like a context manager# /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager def _move_ignore_enoent(src, dst): """Move src to dst, ignoring ENOENT.""" try: os.rename(src, dst) except OSError as error: if error.errno != errno.ENOENT: raise error @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") _move_ignore_enoent(bii_dir, os.path.join(os.getcwd(), "bii")) try: yield finally: _move_ignore_enoent(os.path.join(os.getcwd(), "bii"), bii_dir) def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv)
<commit_before># /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") try: os.rename(bii_dir, os.path.join(os.getcwd(), "bii")) except OSError as error: if error.errno != errno.ENOENT: raise error def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv) <commit_msg>bii: Make _bii_deps_in_place actually behave like a context manager<commit_after># /ciscripts/coverage/bii/coverage.py # # Submit coverage totals for a bii project to coveralls # # See /LICENCE.md for Copyright information """Submit coverage totals for a bii project to coveralls.""" import errno import os from contextlib import contextmanager def _move_ignore_enoent(src, dst): """Move src to dst, ignoring ENOENT.""" try: os.rename(src, dst) except OSError as error: if error.errno != errno.ENOENT: raise error @contextmanager def _bii_deps_in_place(cont): """Move bii project dependencies into layout. The coverage step may require these dependencies to be present. """ bii_dir = os.path.join(cont.named_cache_dir("cmake-build"), "bii") _move_ignore_enoent(bii_dir, os.path.join(os.getcwd(), "bii")) try: yield finally: _move_ignore_enoent(os.path.join(os.getcwd(), "bii"), bii_dir) def run(cont, util, shell, argv=None): """Submit coverage total to coveralls, with bii specific preparation.""" with _bii_deps_in_place(cont): util.fetch_and_import("coverage/cmake/coverage.py").run(cont, util, shell, argv)
08300895dc8d2abb740dd71b027e9acda8bb84dd
chatterbot/ext/django_chatterbot/views.py
chatterbot/ext/django_chatterbot/views.py
from django.views.generic import View from django.http import JsonResponse from django.conf import settings class ChatterBotView(View): def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = settings.CHATTERBOT.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405)
from django.views.generic import View from django.http import JsonResponse from django.conf import settings from chatterbot import ChatBot class ChatterBotView(View): chatterbot = ChatBot( settings.CHATTERBOT['NAME'], storage_adapter='chatterbot.adapters.storage.DjangoStorageAdapter', input_adapter='chatterbot.adapters.input.VariableInputTypeAdapter', output_adapter='chatterbot.adapters.output.OutputFormatAdapter', output_format='json' ) def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = self.chatterbot.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405)
Initialize ChatterBot in django view module instead of settings.
Initialize ChatterBot in django view module instead of settings.
Python
bsd-3-clause
Reinaesaya/OUIRL-ChatBot,Reinaesaya/OUIRL-ChatBot,vkosuri/ChatterBot,davizucon/ChatterBot,gunthercox/ChatterBot,maclogan/VirtualPenPal,Gustavo6046/ChatterBot
from django.views.generic import View from django.http import JsonResponse from django.conf import settings class ChatterBotView(View): def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = settings.CHATTERBOT.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) Initialize ChatterBot in django view module instead of settings.
from django.views.generic import View from django.http import JsonResponse from django.conf import settings from chatterbot import ChatBot class ChatterBotView(View): chatterbot = ChatBot( settings.CHATTERBOT['NAME'], storage_adapter='chatterbot.adapters.storage.DjangoStorageAdapter', input_adapter='chatterbot.adapters.input.VariableInputTypeAdapter', output_adapter='chatterbot.adapters.output.OutputFormatAdapter', output_format='json' ) def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = self.chatterbot.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405)
<commit_before>from django.views.generic import View from django.http import JsonResponse from django.conf import settings class ChatterBotView(View): def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = settings.CHATTERBOT.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) <commit_msg>Initialize ChatterBot in django view module instead of settings.<commit_after>
from django.views.generic import View from django.http import JsonResponse from django.conf import settings from chatterbot import ChatBot class ChatterBotView(View): chatterbot = ChatBot( settings.CHATTERBOT['NAME'], storage_adapter='chatterbot.adapters.storage.DjangoStorageAdapter', input_adapter='chatterbot.adapters.input.VariableInputTypeAdapter', output_adapter='chatterbot.adapters.output.OutputFormatAdapter', output_format='json' ) def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = self.chatterbot.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405)
from django.views.generic import View from django.http import JsonResponse from django.conf import settings class ChatterBotView(View): def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = settings.CHATTERBOT.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) Initialize ChatterBot in django view module instead of settings.from django.views.generic import View from django.http import JsonResponse from django.conf import settings from chatterbot import ChatBot class ChatterBotView(View): chatterbot = ChatBot( settings.CHATTERBOT['NAME'], storage_adapter='chatterbot.adapters.storage.DjangoStorageAdapter', input_adapter='chatterbot.adapters.input.VariableInputTypeAdapter', output_adapter='chatterbot.adapters.output.OutputFormatAdapter', output_format='json' ) def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = self.chatterbot.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405)
<commit_before>from django.views.generic import View from django.http import JsonResponse from django.conf import settings class ChatterBotView(View): def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = settings.CHATTERBOT.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) <commit_msg>Initialize ChatterBot in django view module instead of settings.<commit_after>from django.views.generic import View from django.http import JsonResponse from django.conf import settings from chatterbot import ChatBot class ChatterBotView(View): chatterbot = ChatBot( settings.CHATTERBOT['NAME'], storage_adapter='chatterbot.adapters.storage.DjangoStorageAdapter', input_adapter='chatterbot.adapters.input.VariableInputTypeAdapter', output_adapter='chatterbot.adapters.output.OutputFormatAdapter', output_format='json' ) def post(self, request, *args, **kwargs): input_statement = request.POST.get('text') response_data = self.chatterbot.get_response(input_statement) return JsonResponse(response_data, status=200) def get(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def patch(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405) def delete(self, request, *args, **kwargs): data = { 'detail': 'You should make a POST request to this endpoint.' } # Return a method not allowed response return JsonResponse(data, status=405)
5c0c2f470451c69a2b4cbba3746d26207c6b17a9
lang/__init__.py
lang/__init__.py
from . import tokenizer, ast, codegen import sys, os, subprocess TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir('rt')): with open(os.path.join('rt', fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn)
from . import tokenizer, ast, codegen import sys, os, subprocess BASE = os.path.dirname(__path__[0]) RT_DIR = os.path.join(BASE, 'rt') TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir(RT_DIR)): with open(os.path.join(RT_DIR, fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn)
Fix up path to runtime library directory.
Fix up path to runtime library directory.
Python
mit
djc/runa,djc/runa,djc/runa,djc/runa
from . import tokenizer, ast, codegen import sys, os, subprocess TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir('rt')): with open(os.path.join('rt', fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn) Fix up path to runtime library directory.
from . import tokenizer, ast, codegen import sys, os, subprocess BASE = os.path.dirname(__path__[0]) RT_DIR = os.path.join(BASE, 'rt') TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir(RT_DIR)): with open(os.path.join(RT_DIR, fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn)
<commit_before>from . import tokenizer, ast, codegen import sys, os, subprocess TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir('rt')): with open(os.path.join('rt', fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn) <commit_msg>Fix up path to runtime library directory.<commit_after>
from . import tokenizer, ast, codegen import sys, os, subprocess BASE = os.path.dirname(__path__[0]) RT_DIR = os.path.join(BASE, 'rt') TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir(RT_DIR)): with open(os.path.join(RT_DIR, fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn)
from . import tokenizer, ast, codegen import sys, os, subprocess TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir('rt')): with open(os.path.join('rt', fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn) Fix up path to runtime library directory.from . import tokenizer, ast, codegen import sys, os, subprocess BASE = os.path.dirname(__path__[0]) RT_DIR = os.path.join(BASE, 'rt') TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir(RT_DIR)): with open(os.path.join(RT_DIR, fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn)
<commit_before>from . import tokenizer, ast, codegen import sys, os, subprocess TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir('rt')): with open(os.path.join('rt', fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn) <commit_msg>Fix up path to runtime library directory.<commit_after>from . import tokenizer, ast, codegen import sys, os, subprocess BASE = os.path.dirname(__path__[0]) RT_DIR = os.path.join(BASE, 'rt') TRIPLES = { 'darwin': 'x86_64-apple-darwin11.0.0', 'linux2': 'x86_64-pc-linux-gnu', } def llir(fn, full=True): src = codegen.source(ast.parse(tokenizer.tokenize(open(fn)))) if not full: return src std = [] for fn in sorted(os.listdir(RT_DIR)): with open(os.path.join(RT_DIR, fn)) as f: std.append(f.read() + '\n') triple = 'target triple = "%s"\n\n' % TRIPLES[sys.platform] return triple + ''.join(std) + src def compile(fn, outfn): llfn = fn + '.ll' with open(llfn, 'w') as f: f.write(llir(fn)) subprocess.check_call(('clang', '-o', outfn, llfn)) os.unlink(llfn)
4c00c394e4015a7cae5c5de574d996b20252ea3f
corgi/numpy_utils.py
corgi/numpy_utils.py
def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max()
def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() def independent_columns(A, tol=1e-05): """ Return an array composed of independent columns of A. Note the answer may not be unique; this function returns one of many possible answers. http://stackoverflow.com/q/13312498/190597 (user1812712) http://math.stackexchange.com/a/199132/1140 (Gerry Myerson) http://mail.scipy.org/pipermail/numpy-discussion/2008-November/038705.html (Anne Archibald) >>> A = np.array([(2,4,1,3),(-1,-2,1,0),(0,0,2,2),(3,6,2,5)]) 2 4 1 3 -1 -2 1 0 0 0 2 2 3 6 2 5 # try with checking the rank of matrices >>> independent_columns(A) np.array([[ 2, 1], [-1, 1], [ 0, 2], [ 3, 2]]), np.array([0, 2]) """ # TODO need a source for diag(R) > 0, being the independent indices of A Q, R = np.linalg.qr(A) independent = np.where(np.abs(R.diagonal()) > tol)[0] return A[:, independent], independent
Add a function for removing & finding independent columns from an array
Add a function for removing & finding independent columns from an array
Python
mit
log0ymxm/corgi
def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() Add a function for removing & finding independent columns from an array
def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() def independent_columns(A, tol=1e-05): """ Return an array composed of independent columns of A. Note the answer may not be unique; this function returns one of many possible answers. http://stackoverflow.com/q/13312498/190597 (user1812712) http://math.stackexchange.com/a/199132/1140 (Gerry Myerson) http://mail.scipy.org/pipermail/numpy-discussion/2008-November/038705.html (Anne Archibald) >>> A = np.array([(2,4,1,3),(-1,-2,1,0),(0,0,2,2),(3,6,2,5)]) 2 4 1 3 -1 -2 1 0 0 0 2 2 3 6 2 5 # try with checking the rank of matrices >>> independent_columns(A) np.array([[ 2, 1], [-1, 1], [ 0, 2], [ 3, 2]]), np.array([0, 2]) """ # TODO need a source for diag(R) > 0, being the independent indices of A Q, R = np.linalg.qr(A) independent = np.where(np.abs(R.diagonal()) > tol)[0] return A[:, independent], independent
<commit_before>def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() <commit_msg>Add a function for removing & finding independent columns from an array<commit_after>
def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() def independent_columns(A, tol=1e-05): """ Return an array composed of independent columns of A. Note the answer may not be unique; this function returns one of many possible answers. http://stackoverflow.com/q/13312498/190597 (user1812712) http://math.stackexchange.com/a/199132/1140 (Gerry Myerson) http://mail.scipy.org/pipermail/numpy-discussion/2008-November/038705.html (Anne Archibald) >>> A = np.array([(2,4,1,3),(-1,-2,1,0),(0,0,2,2),(3,6,2,5)]) 2 4 1 3 -1 -2 1 0 0 0 2 2 3 6 2 5 # try with checking the rank of matrices >>> independent_columns(A) np.array([[ 2, 1], [-1, 1], [ 0, 2], [ 3, 2]]), np.array([0, 2]) """ # TODO need a source for diag(R) > 0, being the independent indices of A Q, R = np.linalg.qr(A) independent = np.where(np.abs(R.diagonal()) > tol)[0] return A[:, independent], independent
def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() Add a function for removing & finding independent columns from an arraydef remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() def independent_columns(A, tol=1e-05): """ Return an array composed of independent columns of A. Note the answer may not be unique; this function returns one of many possible answers. http://stackoverflow.com/q/13312498/190597 (user1812712) http://math.stackexchange.com/a/199132/1140 (Gerry Myerson) http://mail.scipy.org/pipermail/numpy-discussion/2008-November/038705.html (Anne Archibald) >>> A = np.array([(2,4,1,3),(-1,-2,1,0),(0,0,2,2),(3,6,2,5)]) 2 4 1 3 -1 -2 1 0 0 0 2 2 3 6 2 5 # try with checking the rank of matrices >>> independent_columns(A) np.array([[ 2, 1], [-1, 1], [ 0, 2], [ 3, 2]]), np.array([0, 2]) """ # TODO need a source for diag(R) > 0, being the independent indices of A Q, R = np.linalg.qr(A) independent = np.where(np.abs(R.diagonal()) > tol)[0] return A[:, independent], independent
<commit_before>def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() <commit_msg>Add a function for removing & finding independent columns from an array<commit_after>def remove_low_variance(X, low_variance_threshold=1e-4, axis=0): low_variance = X.var(axis=axis) < low_variance_threshold X[:, ~low_variance] return X def normalize(X): zero_mean = X - X.mean() return (zero_mean + zero_mean.min()) / X.max() def independent_columns(A, tol=1e-05): """ Return an array composed of independent columns of A. Note the answer may not be unique; this function returns one of many possible answers. http://stackoverflow.com/q/13312498/190597 (user1812712) http://math.stackexchange.com/a/199132/1140 (Gerry Myerson) http://mail.scipy.org/pipermail/numpy-discussion/2008-November/038705.html (Anne Archibald) >>> A = np.array([(2,4,1,3),(-1,-2,1,0),(0,0,2,2),(3,6,2,5)]) 2 4 1 3 -1 -2 1 0 0 0 2 2 3 6 2 5 # try with checking the rank of matrices >>> independent_columns(A) np.array([[ 2, 1], [-1, 1], [ 0, 2], [ 3, 2]]), np.array([0, 2]) """ # TODO need a source for diag(R) > 0, being the independent indices of A Q, R = np.linalg.qr(A) independent = np.where(np.abs(R.diagonal()) > tol)[0] return A[:, independent], independent
129cd22de51d58cc956ca5586fc15cd2e247446b
gpkitmodels/GP/aircraft/prop/propeller.py
gpkitmodels/GP/aircraft/prop/propeller.py
" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints
" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius W 10 [lbf] prop weight """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self,parent, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho R = parent.R constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints
Add prop structure and performance models
Add prop structure and performance models
Python
mit
convexengineering/gplibrary,convexengineering/gplibrary
" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraintsAdd prop structure and performance models
" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius W 10 [lbf] prop weight """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self,parent, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho R = parent.R constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints
<commit_before>" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints<commit_msg>Add prop structure and performance models<commit_after>
" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius W 10 [lbf] prop weight """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self,parent, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho R = parent.R constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints
" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraintsAdd prop structure and performance models" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius W 10 [lbf] prop weight """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self,parent, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho R = parent.R constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints
<commit_before>" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints<commit_msg>Add prop structure and performance models<commit_after>" propeller model " from numpy import pi from gpkit import Model, parse_variables, SignomialsEnabled, SignomialEquality class Propeller(Model): """ Propeller Model Variables --------- R 10 [m] prop radius W 10 [lbf] prop weight """ def setup(self): exec parse_variables(Propeller.__doc__) def performance(state): return Propeller_Performance(self, state) class Propeller_Performance(Model): """ Propeller Model Variables --------- T [N] thrust Tc [-] coefficient of thrust etaadd 0.7 [-] swirl and nonuniformity losses etav 0.85 [-] viscous losses etai [-] inviscid losses eta [-] overall efficiency z1 self.helper [-] efficiency helper 1 z2 [-] efficiency helper 2 """ def helper(self, c): return 2. - 1./c[self.etaadd] def setup(self,parent, state): exec parse_variables(Propeller.__doc__) V = state.V rho = state.rho R = parent.R constraints = [eta <= etav*etai, Tc == T/(0.5*rho*V**2*pi*R**2), z2 >= Tc + 1, etai*(z1 + z2**0.5/etaadd) <= 2] return constraints
085a61894143b525a8cf8fe7f2029e4993a4279a
lib/exp/filters/ransac.py
lib/exp/filters/ransac.py
import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_() M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M
import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_(self) M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M
Fix filter_ method calling parent method bug
Fix filter_ method calling parent method bug
Python
agpl-3.0
speed-of-light/pyslider
import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_() M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M Fix filter_ method calling parent method bug
import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_(self) M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M
<commit_before>import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_() M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M <commit_msg>Fix filter_ method calling parent method bug<commit_after>
import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_(self) M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M
import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_() M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M Fix filter_ method calling parent method bugimport numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_(self) M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M
<commit_before>import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_() M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M <commit_msg>Fix filter_ method calling parent method bug<commit_after>import numpy as np import cv2 from core import KpFilter class Ransac(KpFilter): def __init__(self, data): KpFilter.__init__(self, data) def __good_pts(self, kps, mps): rr = [kps[m].pt for m in mps] return np.float32(rr).reshape(-1, 1, 2) def __compute(self, good, skp, vkp, min_matches=10): """ data is a dict contained columns: [ 'kp_train', 'kp_test', 'matches'] Methods: 0 - a regular method using all the points CV_RANSAC - RANSAC-based robust method CV_LMEDS - Least-Median robust method """ if len(good) >= min_matches: src = self.__good_pts(skp, good.qix) des = self.__good_pts(vkp, good.tix) M, mask = cv2.\ findHomography(src, des, method=cv2.RANSAC, ransacReprojThreshold=5.0) return M, mask return None, None def filter_(self, min_matches=10): """ Returned by-product: M, the homography boundary """ good, skp, fkp = KpFilter.filter_(self) M, mask = self.__compute(good, skp, fkp, min_matches=min_matches) self.data['matches']['keep'] = mask return M
570bbe3add6a19a7ec6c14adfa04da76d14aa740
common/templatetags/lutris.py
common/templatetags/lutris.py
import copy from django import template from django.conf import settings from games import models register = template.Library() def get_links(user_agent): systems = ['ubuntu', 'fedora', 'linux'] downloads = copy.copy(settings.DOWNLOADS) main_download = None for system in systems: if system in user_agent: main_download = {system: settings.DOWNLOADS[system]} downloads.pop(system) if not main_download: main_download = {'linux': downloads.pop('linux')} return (main_download, downloads) @register.inclusion_tag('includes/download_links.html', takes_context=True) def download_links(context): request = context['request'] user_agent = request.META.get('HTTP_USER_AGENT', '').lower() context['main_download'], context['downloads'] = get_links(user_agent) return context @register.inclusion_tag('includes/featured_slider.html', takes_context=True) def featured_slider(context): context['featured_contents'] = models.Featured.objects.all() return context @register.inclusion_tag('includes/latest_games.html', takes_context=True) def latest_games(context): games = models.Game.objects.published().order_by('-created')[:5] context['latest_games'] = games return context
import copy from django import template from django.conf import settings from games import models register = template.Library() def get_links(user_agent): systems = ['ubuntu', 'fedora', 'linux'] downloads = copy.copy(settings.DOWNLOADS) main_download = None for system in systems: if system in user_agent: main_download = {system: downloads[system]} downloads.pop(system) if not main_download: main_download = {'linux': downloads.pop('linux')} return (main_download, downloads) @register.inclusion_tag('includes/download_links.html', takes_context=True) def download_links(context): request = context['request'] user_agent = request.META.get('HTTP_USER_AGENT', '').lower() context['main_download'], context['downloads'] = get_links(user_agent) return context @register.inclusion_tag('includes/featured_slider.html', takes_context=True) def featured_slider(context): context['featured_contents'] = models.Featured.objects.all() return context @register.inclusion_tag('includes/latest_games.html', takes_context=True) def latest_games(context): games = models.Game.objects.published().order_by('-created')[:5] context['latest_games'] = games return context
Fix bug in download links
Fix bug in download links
Python
agpl-3.0
Turupawn/website,Turupawn/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website,lutris/website,lutris/website
2c63efeb705637a068c909be7dc72f18e90561bf
cloudbridge/cloud/providers/azure/test/test_azure_resource_group.py
cloudbridge/cloud/providers/azure/test/test_azure_resource_group.py
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase


class AzureResourceGroupTestCase(ProviderTestBase):
    def test_resource_group_create(self):
        resource_group_params = {'location': self.provider.region_name}
        rg = self.provider.azure_client. \
            create_resource_group(self.provider.resource_group,
                                  resource_group_params)
        print("Create Resource - " + str(rg))
        self.assertTrue(
            rg.name == "cloudbridge",
            "Resource Group should be Cloudbridge")

    def test_resource_group_get(self):
        rg = self.provider.azure_client.get_resource_group('MyGroup')
        print("Get Resource - " + str(rg))
        self.assertTrue(
            rg.name == "testResourceGroup",
            "Resource Group should be Cloudbridge")

from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase


class AzureResourceGroupTestCase(ProviderTestBase):
    def test_resource_group_create(self):
        resource_group_params = {'location': self.provider.region_name}
        rg = self.provider.azure_client. \
            create_resource_group(self.provider.resource_group,
                                  resource_group_params)
        print("Create Resource - " + str(rg))
        self.assertTrue(
            rg.name == self.provider.resource_group,
            "Resource Group should be {0}".format(rg.name))

    def test_resource_group_get(self):
        rg = self.provider.azure_client.get_resource_group('MyGroup')
        print("Get Resource - " + str(rg))
        self.assertTrue(
            rg.name == "testResourceGroup",
            "Resource Group should be {0}".format(rg.name))

Update resource group unit test
Update resource group unit test
Python
mit
ms-azure-cloudbroker/cloudbridge
276f74dfef46f5fa0913ddd3759d682eb58c7cec
chmvh_website/gallery/tasks.py
chmvh_website/gallery/tasks.py
from io import BytesIO

from django.conf import settings
from django.core.files.base import ContentFile
from PIL import Image

from chmvh_website import celery_app


@celery_app.task
def create_thumbnail(patient):
    image = Image.open(patient.picture.path)
    pil_type = image.format

    if pil_type == 'JPEG':
        ext = 'jpg'
    elif pil_type == 'PNG':
        ext = 'png'
    else:
        print("Can't generate thumbnail for '{0}'".format(pil_type))
        return False

    image.thumbnail(settings.GALLERY_THUMBNAIL_SIZE)

    temp_handle = BytesIO()
    image.save(temp_handle, pil_type)
    temp_handle.seek(0)

    path = patient.picture.name.rsplit('.', 1)[0]
    patient.thumbnail.save(
        '{0}_thumbnail.{1}'.format(path, ext),
        ContentFile(temp_handle.getvalue()),
        save=False)

    patient.save(update_fields=['thumbnail'])

    return True

from io import BytesIO

from celery.utils.log import get_task_logger
from django.conf import settings
from django.core.files.base import ContentFile
from PIL import Image

from chmvh_website import celery_app

_INVALID_FORMAT_ERROR = ("Can't generate thumbnail for {type} filetype. "
                         "(Path: {path})")

default_logger = get_task_logger(__name__)


@celery_app.task
def create_thumbnail(patient, logger=default_logger):
    logger.debug("Generating thumbnail for {0}".format(patient.picture.path))

    image = Image.open(patient.picture.path)
    pil_type = image.format

    if pil_type == 'JPEG':
        ext = 'jpg'
    elif pil_type == 'PNG':
        ext = 'png'
    else:
        logger.warning(_INVALID_FORMAT_ERROR.format({
            'path': patient.picture.path,
            'type': pil_type,
        }))
        return False

    image.thumbnail(settings.GALLERY_THUMBNAIL_SIZE)

    temp_handle = BytesIO()
    image.save(temp_handle, pil_type)
    temp_handle.seek(0)

    path = patient.picture.name.rsplit('.', 1)[0]
    thumb_path = '{0}_thumbnail.{1}'.format(path, ext)

    patient.thumbnail.save(
        thumb_path,
        ContentFile(temp_handle.getvalue()),
        save=False)

    logger.debug("Saving thumbnail to {0}".format(thumb_path))
    patient.save(update_fields=['thumbnail'])

    logger.info("Generated thumbnail {0}".format(thumb_path))

    return True

Add logging to thumbnail creation task.
Add logging to thumbnail creation task.
Python
mit
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
d535cf76b3129c0e5b6908a720bdf3e3a804e41b
mopidy/mixers/gstreamer_software.py
mopidy/mixers/gstreamer_software.py
import multiprocessing

from mopidy.mixers import BaseMixer
from mopidy.utils.process import pickle_connection


class GStreamerSoftwareMixer(BaseMixer):
    """Mixer which uses GStreamer to control volume in software."""

    def __init__(self, *args, **kwargs):
        super(GStreamerSoftwareMixer, self).__init__(*args, **kwargs)

    def _get_volume(self):
        my_end, other_end = multiprocessing.Pipe()
        self.backend.output_queue.put({
            'command': 'get_volume',
            'reply_to': pickle_connection(other_end),
        })
        my_end.poll(None)
        return my_end.recv()

    def _set_volume(self, volume):
        self.backend.output_queue.put({
            'command': 'set_volume',
            'volume': volume,
        })

from mopidy.mixers import BaseMixer


class GStreamerSoftwareMixer(BaseMixer):
    """Mixer which uses GStreamer to control volume in software."""

    def __init__(self, *args, **kwargs):
        super(GStreamerSoftwareMixer, self).__init__(*args, **kwargs)

    def _get_volume(self):
        return self.backend.output.get_volume()

    def _set_volume(self, volume):
        self.backend.output.set_volume(volume)

Update GStreamer software mixer to use new output API
Update GStreamer software mixer to use new output API
Python
apache-2.0
SuperStarPL/mopidy,bacontext/mopidy,dbrgn/mopidy,ali/mopidy,quartz55/mopidy,adamcik/mopidy,liamw9534/mopidy,ali/mopidy,swak/mopidy,liamw9534/mopidy,abarisain/mopidy,swak/mopidy,mopidy/mopidy,pacificIT/mopidy,swak/mopidy,diandiankan/mopidy,adamcik/mopidy,bencevans/mopidy,hkariti/mopidy,glogiotatidis/mopidy,vrs01/mopidy,ZenithDK/mopidy,diandiankan/mopidy,bencevans/mopidy,tkem/mopidy,mokieyue/mopidy,jodal/mopidy,ali/mopidy,priestd09/mopidy,quartz55/mopidy,mopidy/mopidy,rawdlite/mopidy,kingosticks/mopidy,woutervanwijk/mopidy,jmarsik/mopidy,ali/mopidy,kingosticks/mopidy,dbrgn/mopidy,vrs01/mopidy,bacontext/mopidy,mopidy/mopidy,bencevans/mopidy,ZenithDK/mopidy,pacificIT/mopidy,swak/mopidy,rawdlite/mopidy,tkem/mopidy,dbrgn/mopidy,priestd09/mopidy,jcass77/mopidy,jcass77/mopidy,abarisain/mopidy,quartz55/mopidy,mokieyue/mopidy,quartz55/mopidy,jcass77/mopidy,hkariti/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,vrs01/mopidy,kingosticks/mopidy,priestd09/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,tkem/mopidy,jmarsik/mopidy,rawdlite/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,jodal/mopidy,adamcik/mopidy,jmarsik/mopidy,vrs01/mopidy,rawdlite/mopidy,jodal/mopidy,ZenithDK/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,bacontext/mopidy,hkariti/mopidy,pacificIT/mopidy,hkariti/mopidy,mokieyue/mopidy,diandiankan/mopidy,bacontext/mopidy,pacificIT/mopidy,bencevans/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,tkem/mopidy
104c136488d468f26c7fe247d0548636cbf3c6fe
random_4.py
random_4.py
"""
How to generate a random 4 digit number not starting with 0
and having unique digits in python?
"""
import random

l = [0,1,2,3,4,5,6,7,8,9]
random.shuffle(l)
if l[0] == 0:
    print(''.join(map(str, l[1:5])))
else:
    print(''.join(map(str, l[0:4])))

"""
How to generate a random 4 digit number not starting with 0
and having unique digits in python?
"""
import random

# 1.
l = [0,1,2,3,4,5,6,7,8,9]
random.shuffle(l)
if l[0] == 0:
    pos = random.choice(range(1, len(l)))
    l[0], l[pos] = l[pos], l[0]
print(''.join(map(str, l[0:4])))

# 2.
# We create a set of digits: {0, 1, .... 9}
digits = set(range(10))
# We generate a random integer, 1 <= first <= 9
first = random.randint(1, 9)
# We remove it from our set, then take a sample of
# 3 distinct elements from the remaining values
last_3 = random.sample(digits - {first}, 3)
print(str(first) + ''.join(map(str, last_3)))

# 3.
numbers = [0]
while numbers[0] == 0:
    numbers = random.sample(range(10), 4)
print(''.join(map(str, numbers)))

Fix of shuffle. There should be random swap of leading zero with one from nine (non-zero) positions.
Fix of shuffle. There should be random swap of leading zero with one from nine (non-zero) positions.
Python
mit
foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard
863b12e503559b24de29407cd674d432bdcbbfc0
cs251tk/referee/send_email.py
cs251tk/referee/send_email.py
import smtplib


def send_email(msg):
    # Send the message via our own SMTP server.
    with smtplib.SMTP_SSL('smtp.gmail.com', 465) as s:
        # s.starttls()
        s.set_debuglevel(2)
        s.send_message(msg)

import smtplib


def send_email(msg):
    # Send the message via our own SMTP server.
    with smtplib.SMTP_SSL('smtp.gmail.com', 465) as s:
        s.send_message(msg)

Remove somore more lines related to email
Remove somore more lines related to email
Python
mit
StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit
1c0d42889b721cf68deb199711d8ae7700c40b66
marcottimls/tools/logsetup.py
marcottimls/tools/logsetup.py
import os
import json
import logging
import logging.config


def setup_logging(log_path, settings_path="logging.json",
                  default_level=logging.INFO):
    """Setup logging configuration"""
    path = settings_path
    if os.path.exists(path):
        with open(path, 'rt') as f:
            config = json.load(f)
        config['handlers']['main']['filename'] = log_path
        logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=default_level)

import os
import json
import logging
import logging.config


def setup_logging(settings_path="logging.json", default_level=logging.INFO):
    """Setup logging configuration"""
    path = settings_path
    if os.path.exists(path):
        with open(path, 'rt') as f:
            config = json.load(f)
        logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=default_level)

Remove kludge from setup_logging as no longer necessary
Remove kludge from setup_logging as no longer necessary
Python
mit
soccermetrics/marcotti-mls
93be3585d269360641091f18a6443979eb8f1f98
cito/dump_db.py
cito/dump_db.py
"""Delete all documents every second forever"""
__author__ = 'tunnell'

import sys
import time
import json

import pymongo

if __name__ == "__main__":
    c = pymongo.MongoClient()
    db = c.data
    collection = db.test

    # Key to sort by so we can use an index for quick query
    sort_key = [("_id", pymongo.ASCENDING),
                ("triggertime", pymongo.ASCENDING)]

    # Index for quick query
    collection.create_index(sort_key, dropDups=True)

    # Loop until Ctrl-C or error
    while (1):
        # This try-except catches Ctrl-C and error
        try:
            # Non-sense query that is in index
            query = {"triggertime": {'$gt': 0}}

            # Perform query
            cursor = collection.find(query,
                                     fields=['triggertime']).sort(sort_key)

            # Are we using index for quick queries? Not always true if there
            # are no documents in the collection...
            print('Using index:', cursor.explain()['indexOnly'])

            # Stats on how the delete worked. Write concern (w=1) is on.
            print(json.dumps(collection.remove(query),
                             indent=4,
                             sort_keys=True,
                             w=1))

            # Wait a second so we don't query the DB too much
            time.sleep(1)
        except pymongo.errors.OperationFailure as e:
            print('MongoDB error:', e)
        except KeyboardInterrupt:
            print("Ctrl-C caught so exiting.")
            sys.exit(0)

"""Delete all documents every second forever"""
__author__ = 'tunnell'

import sys
import time
import json

import pymongo

if __name__ == "__main__":
    c = pymongo.MongoClient()
    db = c.data
    collection = db.test

    # Key to sort by so we can use an index for quick query
    sort_key = [("_id", pymongo.ASCENDING),
                ("triggertime", pymongo.ASCENDING)]

    # Index for quick query
    collection.create_index(sort_key, dropDups=True)

    # Loop until Ctrl-C or error
    while (1):
        # This try-except catches Ctrl-C and error
        try:
            # Non-sense query that is in index
            query = {"triggertime": {'$gt': 0}}

            # Perform query
            cursor = collection.find(query,
                                     fields=['triggertime']).sort(sort_key)

            # Are we using index for quick queries? Not always true if there
            # are no documents in the collection...
            print('Using index:', cursor.explain()['indexOnly'])

            # Stats on how the delete worked. Write concern is on.
            print(json.dumps(collection.remove(query),
                             indent=4,
                             sort_keys=True))

            # Wait a second so we don't query the DB too much
            time.sleep(1)
        except pymongo.errors.OperationFailure as e:
            print('MongoDB error:', e)
        except KeyboardInterrupt:
            print("Ctrl-C caught so exiting.")
            sys.exit(0)

Write concern bug fix in constantly deleting DB. It's on by default and w=1 does nothing.
BUG: Write concern bug fix in constantly deleting DB. It's on by default and w=1 does nothing.
Python
bsd-3-clause
tunnell/wax,tunnell/wax,tunnell/wax
44ed4ceffcbf95ed42e4bdebcc87a7137a97de50
been/source/markdown.py
been/source/markdown.py
from been.core import DirectorySource, source_registry class Markdown(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(Markdown)
from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
Rename Markdown source to MarkdownDirectory.
Rename Markdown source to MarkdownDirectory.
Python
bsd-3-clause
chromakode/been
from been.core import DirectorySource, source_registry class Markdown(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(Markdown) Rename Markdown source to MarkdownDirectory.
from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
<commit_before>from been.core import DirectorySource, source_registry class Markdown(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(Markdown) <commit_msg>Rename Markdown source to MarkdownDirectory.<commit_after>
from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
from been.core import DirectorySource, source_registry class Markdown(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(Markdown) Rename Markdown source to MarkdownDirectory.from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
<commit_before>from been.core import DirectorySource, source_registry class Markdown(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(Markdown) <commit_msg>Rename Markdown source to MarkdownDirectory.<commit_after>from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
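Note on the record above: the rename touches only the class name and its `source_registry.add(...)` call; `source_registry` itself lives in `been.core`, which the record does not show. A minimal sketch of the registry pattern the code implies, with all internals assumed:

class SourceRegistry(object):
    """Keys registered source classes by their 'kind' string (structure assumed)."""
    def __init__(self):
        self._sources = {}

    def add(self, source_cls):
        self._sources[source_cls.kind] = source_cls

    def get(self, kind):
        return self._sources[kind]

source_registry = SourceRegistry()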
a28f6a45f37d906a9f9901d46d683c3ca5406da4
code/csv2map.py
code/csv2map.py
# csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main()
# csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create. Fields will be : num chromosome (1-22, X, Y or 0 if unplaced), snp identifier, Genetic distance (morgans) = 0, Base-pair position (bp units)') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main()
Add infos about MAP file
Add infos about MAP file
Python
mit
chagaz/sfan,chagaz/sfan,chagaz/sfan,chagaz/sfan,chagaz/sfan
# csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main() Add infos about MAP file
# csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create. Fields will be : num chromosome (1-22, X, Y or 0 if unplaced), snp identifier, Genetic distance (morgans) = 0, Base-pair position (bp units)') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main()
<commit_before># csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main() <commit_msg>Add infos about MAP file<commit_after>
# csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create. Fields will be : num chromosome (1-22, X, Y or 0 if unplaced), snp identifier, Genetic distance (morgans) = 0, Base-pair position (bp units)') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main()
# csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main() Add infos about MAP file# csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create. Fields will be : num chromosome (1-22, X, Y or 0 if unplaced), snp identifier, Genetic distance (morgans) = 0, Base-pair position (bp units)') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main()
<commit_before># csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main() <commit_msg>Add infos about MAP file<commit_after># csv2map.py -- Convert .csv into a .map format # Description of MAP format: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map # # jean-daniel.granet@mines-paristech.fr import sys import argparse def main(): parser = argparse.ArgumentParser(description='Convert .csv to .map') parser.add_argument('csv', help="""CSV file to convert. Fields are : Index,Illumina_SNP_Name,Alternative_SNP_Name,Chromosome,Build36_Position,Build37_Position,new_rsname,Strand,TopAlleles,ForwardAlleles,DesignAlleles """) parser.add_argument('map', help='MAP file to create. Fields will be : num chromosome (1-22, X, Y or 0 if unplaced), snp identifier, Genetic distance (morgans) = 0, Base-pair position (bp units)') args = parser.parse_args() # read the csv file and convert it into a MAP file with open(args.csv, 'r') as fdr: with open(args.map, 'w') as fdw: for line in fdr: line_split = line.split(',') fdw.write("%s\n" % "\n".join(["%s %s 0 %s" % (line_split[3], line_split[2], line_split[5])])) fdw.close() fdr.close() if __name__ == "__main__": main()
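Note on the record above: the converter keeps CSV columns 3 (Chromosome), 2 (Alternative_SNP_Name) and 5 (Build37_Position), in that MAP order, and hard-codes the genetic distance to 0. A worked example on one invented input row:

line = "1,SNP_A-123,rs4477212,1,72017,82154,rs4477212,+,A/G,A/G,A/G"
line_split = line.split(',')
# chromosome, snp identifier, genetic distance (always 0), bp position
print("%s %s 0 %s" % (line_split[3], line_split[2], line_split[5]))
# -> 1 rs4477212 0 82154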
b1f06fc602f2d787b9d564a99d8c92f731ab104c
twext/internet/test/__init__.py
twext/internet/test/__init__.py
## # Copyright (c) 2005-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ##
Build system doesn't approve of empty files
Build system doesn't approve of empty files git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6616 e27351fd-9f3e-4f54-a53b-843176b1656c
Python
apache-2.0
trevor/calendarserver,trevor/calendarserver,trevor/calendarserver
Build system doesn't approve of empty files git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6616 e27351fd-9f3e-4f54-a53b-843176b1656c
## # Copyright (c) 2005-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ##
<commit_before><commit_msg>Build system doesn't approve of empty files git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6616 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>
## # Copyright (c) 2005-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ##
Build system doesn't approve of empty files git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6616 e27351fd-9f3e-4f54-a53b-843176b1656c## # Copyright (c) 2005-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ##
<commit_before><commit_msg>Build system doesn't approve of empty files git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6616 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>## # Copyright (c) 2005-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ##
75225c176135b6d17c8f10ea67dabb4b0fc02505
nodeconductor/iaas/migrations/0009_add_min_ram_and_disk_to_image.py
nodeconductor/iaas/migrations/0009_add_min_ram_and_disk_to_image.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), migrations.AlterField( model_name='instance', name='state', field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]), preserve_default=True, ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), ]
Remove field duplication from migrations(nc-301)
Remove field duplication from migrations(nc-301)
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), migrations.AlterField( model_name='instance', name='state', field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]), preserve_default=True, ), ] Remove field duplication from migrations(nc-301)
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), ]
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), migrations.AlterField( model_name='instance', name='state', field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]), preserve_default=True, ), ] <commit_msg>Remove field duplication from migrations(nc-301)<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), migrations.AlterField( model_name='instance', name='state', field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]), preserve_default=True, ), ] Remove field duplication from migrations(nc-301)# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), ]
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), migrations.AlterField( model_name='instance', name='state', field=django_fsm.FSMIntegerField(default=1, help_text='WARNING! Should not be changed manually unless you really know what you are doing.', max_length=1, choices=[(1, 'Provisioning Scheduled'), (2, 'Provisioning'), (3, 'Online'), (4, 'Offline'), (5, 'Starting Scheduled'), (6, 'Starting'), (7, 'Stopping Scheduled'), (8, 'Stopping'), (9, 'Erred'), (10, 'Deletion Scheduled'), (11, 'Deleting'), (13, 'Resizing Scheduled'), (14, 'Resizing'), (15, 'Restarting Scheduled'), (16, 'Restarting')]), preserve_default=True, ), ] <commit_msg>Remove field duplication from migrations(nc-301)<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_fsm class Migration(migrations.Migration): dependencies = [ ('iaas', '0008_add_instance_restarting_state'), ] operations = [ migrations.AddField( model_name='image', name='min_disk', field=models.PositiveIntegerField(default=0, help_text='Minimum disk size in MiB'), preserve_default=True, ), migrations.AddField( model_name='image', name='min_ram', field=models.PositiveIntegerField(default=0, help_text='Minimum memory size in MiB'), preserve_default=True, ), ]
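Note on the record above: the removed `AlterField` restated the instance `state` field without changing it, so deleting the operation leaves the cumulative migration state identical. After hand-editing a migration like this, Django's own checker can confirm that models and migrations still agree (standard management command, shown as a comment):

# Run from the project root to verify no migration is now missing:
#   python manage.py makemigrations --check --dry-run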
496fc83101155bd0cd2fb4256e2878007aec8eaa
api/base/exceptions.py
api/base/exceptions.py
from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'detail': reason, 'meta': {'field': key}}) else: errors.append({'detail': value, 'meta': {'field': key}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'source': {key: reason}}) else: errors.append({'source': {key: value}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
Use source key instead of meta and detail fields
Use source key instead of meta and detail fields
Python
apache-2.0
asanfilippo7/osf.io,emetsger/osf.io,rdhyee/osf.io,cosenal/osf.io,haoyuchen1992/osf.io,haoyuchen1992/osf.io,DanielSBrown/osf.io,doublebits/osf.io,mluke93/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,doublebits/osf.io,icereval/osf.io,sloria/osf.io,pattisdr/osf.io,ZobairAlijan/osf.io,adlius/osf.io,hmoco/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,mluke93/osf.io,cosenal/osf.io,chrisseto/osf.io,caseyrygt/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,petermalcolm/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,zamattiac/osf.io,chrisseto/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,Ghalko/osf.io,petermalcolm/osf.io,alexschiller/osf.io,felliott/osf.io,cwisecarver/osf.io,KAsante95/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,hmoco/osf.io,kch8qx/osf.io,zachjanicki/osf.io,baylee-d/osf.io,emetsger/osf.io,alexschiller/osf.io,samanehsan/osf.io,billyhunt/osf.io,amyshi188/osf.io,mluo613/osf.io,abought/osf.io,KAsante95/osf.io,kwierman/osf.io,ticklemepierce/osf.io,aaxelb/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,acshi/osf.io,chennan47/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,mattclark/osf.io,kwierman/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,kwierman/osf.io,Nesiehr/osf.io,mluo613/osf.io,samanehsan/osf.io,mluke93/osf.io,RomanZWang/osf.io,Ghalko/osf.io,alexschiller/osf.io,ZobairAlijan/osf.io,danielneis/osf.io,mluo613/osf.io,TomBaxter/osf.io,chrisseto/osf.io,chennan47/osf.io,RomanZWang/osf.io,billyhunt/osf.io,mattclark/osf.io,chennan47/osf.io,saradbowman/osf.io,njantrania/osf.io,wearpants/osf.io,GageGaskins/osf.io,danielneis/osf.io,brianjgeiger/osf.io,felliott/osf.io,billyhunt/osf.io,TomBaxter/osf.io,leb2dg/osf.io,wearpants/osf.io,cwisecarver/osf.io,erinspace/osf.io,doublebits/osf.io,wearpants/osf.io,abought/osf.io,monikagrabowska/osf.io,emetsger/osf.io,icereval/osf.io,alexschiller/osf.io,amyshi188/osf.io,binoculars/osf.io,haoyuchen1992/osf.io,kch8qx/osf.io,RomanZWang/osf.io,acshi/osf.io,njantrania/osf.io,leb2dg/osf.io,kch8qx/osf.io,cslzchen/osf.io,mluo613/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,Ghalko/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,njantrania/osf.io,brianjgeiger/osf.io,caseyrygt/osf.io,monikagrabowska/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,zachjanicki/osf.io,acshi/osf.io,hmoco/osf.io,GageGaskins/osf.io,cslzchen/osf.io,petermalcolm/osf.io,mattclark/osf.io,monikagrabowska/osf.io,doublebits/osf.io,ticklemepierce/osf.io,arpitar/osf.io,caseyrollins/osf.io,billyhunt/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,cosenal/osf.io,caseyrygt/osf.io,erinspace/osf.io,laurenrevere/osf.io,leb2dg/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,SSJohns/osf.io,laurenrevere/osf.io,mluo613/osf.io,zamattiac/osf.io,SSJohns/osf.io,jnayak1/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,icereval/osf.io,emetsger/osf.io,pattisdr/osf.io,njantrania/osf.io,amyshi188/osf.io,adlius/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,crcresearch/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,caneruguz/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,wearpants/osf.io,rdhyee/osf.io,KAsante95/osf.io,baylee-d/osf.io,jnayak1/osf.io,jnayak1/osf.io,doublebits/osf.io,cwisecarver/osf.io,mfraezz/osf.io,caneruguz/osf.io,kch8qx/osf.io,crcresearch/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,SSJohns/osf.io,sloria/osf.io,Nesiehr/osf.io,samanehsan/osf.io,jnayak1/osf.io,danielneis/osf.io,arpitar/osf.io,baylee-d/osf.io,sloria/osf.io
,kch8qx/osf.io,cslzchen/osf.io,binoculars/osf.io,acshi/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,mluke93/osf.io,petermalcolm/osf.io,asanfilippo7/osf.io,kwierman/osf.io,abought/osf.io,caseyrygt/osf.io,Ghalko/osf.io,erinspace/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,zamattiac/osf.io,haoyuchen1992/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,acshi/osf.io,leb2dg/osf.io,laurenrevere/osf.io,TomHeatwole/osf.io,danielneis/osf.io,crcresearch/osf.io,samchrisinger/osf.io,abought/osf.io,adlius/osf.io,asanfilippo7/osf.io,adlius/osf.io,TomBaxter/osf.io,aaxelb/osf.io,cosenal/osf.io,felliott/osf.io,brandonPurvis/osf.io,felliott/osf.io,RomanZWang/osf.io,arpitar/osf.io,binoculars/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,SSJohns/osf.io,caneruguz/osf.io,samchrisinger/osf.io,hmoco/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,arpitar/osf.io
from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'detail': reason, 'meta': {'field': key}}) else: errors.append({'detail': value, 'meta': {'field': key}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.') Use source key instead of meta and detail fields
from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'source': {key: reason}}) else: errors.append({'source': {key: value}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
<commit_before> from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'detail': reason, 'meta': {'field': key}}) else: errors.append({'detail': value, 'meta': {'field': key}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.') <commit_msg>Use source key instead of meta and detail fields<commit_after>
from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'source': {key: reason}}) else: errors.append({'source': {key: value}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'detail': reason, 'meta': {'field': key}}) else: errors.append({'detail': value, 'meta': {'field': key}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.') Use source key instead of meta and detail fields from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'source': {key: reason}}) else: errors.append({'source': {key: value}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
<commit_before> from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'detail': reason, 'meta': {'field': key}}) else: errors.append({'detail': value, 'meta': {'field': key}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.') <commit_msg>Use source key instead of meta and detail fields<commit_after> from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ # Import inside method to avoid errors when the OSF is loaded without Django from rest_framework.views import exception_handler response = exception_handler(exc, context) # Error objects may have the following members. Title removed to avoid clash with node "title" errors. top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in top_level_error_keys: errors.append({key: value}) else: if isinstance(value, list): for reason in value: errors.append({'source': {key: reason}}) else: errors.append({'source': {key: value}}) elif isinstance(message, (list, tuple)): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
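Note on the record above: the handler change moves per-field validation messages from a `detail`/`meta` pair into a JSON-API style `source` member. A standalone sketch of that transformation on a typical DRF-style payload (input values invented; `.items()` used in place of the module's Python 2 `.iteritems()`):

message = {'title': ['This field is required.'], 'detail': 'Not found.'}
top_level_error_keys = ['id', 'links', 'status', 'code',
                        'detail', 'source', 'meta']
errors = []
for key, value in message.items():
    if key in top_level_error_keys:
        errors.append({key: value})
    elif isinstance(value, list):
        for reason in value:
            errors.append({'source': {key: reason}})
    else:
        errors.append({'source': {key: value}})
print({'errors': errors})
# -> {'errors': [{'source': {'title': 'This field is required.'}},
#                {'detail': 'Not found.'}]}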
0223b6fc332bdbc8a641832ebd06b79969b65853
pyfibot/modules/module_btc.py
pyfibot/modules/module_btc.py
# -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display BTC exchange rates""" r = bot.get_url("http://bitcoincharts.com/t/weighted_prices.json") data = r.json() eur_rate = float(data['EUR']['24h']) usd_rate = float(data['USD']['24h']) return bot.say(channel, "1 BTC = $%.2f / %.2f€" % (usd_rate, eur_rate))
# -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display current BTC exchange rates from mtgox""" r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker") btcusd = r.json()['return']['avg']['display_short'] r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker") btceur = r.json()['return']['avg']['display_short'] return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
Use mtgox as data source
Use mtgox as data source
Python
bsd-3-clause
rnyberg/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,huqa/pyfibot,aapa/pyfibot,huqa/pyfibot,rnyberg/pyfibot,EArmour/pyfibot,lepinkainen/pyfibot,aapa/pyfibot
# -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display BTC exchange rates""" r = bot.get_url("http://bitcoincharts.com/t/weighted_prices.json") data = r.json() eur_rate = float(data['EUR']['24h']) usd_rate = float(data['USD']['24h']) return bot.say(channel, "1 BTC = $%.2f / %.2f€" % (usd_rate, eur_rate)) Use mtgox as data source
# -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display current BTC exchange rates from mtgox""" r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker") btcusd = r.json()['return']['avg']['display_short'] r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker") btceur = r.json()['return']['avg']['display_short'] return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
<commit_before># -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display BTC exchange rates""" r = bot.get_url("http://bitcoincharts.com/t/weighted_prices.json") data = r.json() eur_rate = float(data['EUR']['24h']) usd_rate = float(data['USD']['24h']) return bot.say(channel, "1 BTC = $%.2f / %.2f€" % (usd_rate, eur_rate)) <commit_msg>Use mtgox as data source<commit_after>
# -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display current BTC exchange rates from mtgox""" r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker") btcusd = r.json()['return']['avg']['display_short'] r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker") btceur = r.json()['return']['avg']['display_short'] return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
# -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display BTC exchange rates""" r = bot.get_url("http://bitcoincharts.com/t/weighted_prices.json") data = r.json() eur_rate = float(data['EUR']['24h']) usd_rate = float(data['USD']['24h']) return bot.say(channel, "1 BTC = $%.2f / %.2f€" % (usd_rate, eur_rate)) Use mtgox as data source# -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display current BTC exchange rates from mtgox""" r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker") btcusd = r.json()['return']['avg']['display_short'] r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker") btceur = r.json()['return']['avg']['display_short'] return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
<commit_before># -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display BTC exchange rates""" r = bot.get_url("http://bitcoincharts.com/t/weighted_prices.json") data = r.json() eur_rate = float(data['EUR']['24h']) usd_rate = float(data['USD']['24h']) return bot.say(channel, "1 BTC = $%.2f / %.2f€" % (usd_rate, eur_rate)) <commit_msg>Use mtgox as data source<commit_after># -*- encoding: utf-8 -*- from __future__ import unicode_literals, print_function, division def command_btc(bot, user, channel, args): """Display current BTC exchange rates from mtgox""" r = bot.get_url("http://data.mtgox.com/api/1/BTCUSD/ticker") btcusd = r.json()['return']['avg']['display_short'] r = bot.get_url("http://data.mtgox.com/api/1/BTCEUR/ticker") btceur = r.json()['return']['avg']['display_short'] return bot.say(channel, "1 BTC = %s / %s" % (btcusd, btceur))
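Note on the record above: the new code assumes the MtGox v1 ticker nests a preformatted `display_short` string under `return.avg`; the exchange has been offline since 2014, so live calls against these URLs will fail today. A sketch of the response shape the parser expects, with the payload faked:

# Field names mirror what the module reads; the values are invented.
fake_ticker = {'return': {'avg': {'display_short': '$123.45'}}}
btcusd = fake_ticker['return']['avg']['display_short']
print("1 BTC = %s" % btcusd)  # -> 1 BTC = $123.45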
0868bbd0e445cb39217351cd13d2c3f7a173416c
mws/__init__.py
mws/__init__.py
# -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', # TODO Add Subscriptions ]
# -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers, Subscriptions __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', 'Subscriptions', ]
Include the new Subscriptions stub
Include the new Subscriptions stub
Python
unlicense
GriceTurrble/python-amazon-mws,Bobspadger/python-amazon-mws
# -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', # TODO Add Subscriptions ] Include the new Subscriptions stub
# -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers, Subscriptions __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', 'Subscriptions', ]
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', # TODO Add Subscriptions ] <commit_msg>Include the new Subscriptions stub<commit_after>
# -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers, Subscriptions __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', 'Subscriptions', ]
# -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', # TODO Add Subscriptions ] Include the new Subscriptions stub# -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers, Subscriptions __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', 'Subscriptions', ]
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', # TODO Add Subscriptions ] <commit_msg>Include the new Subscriptions stub<commit_after># -*- coding: utf-8 -*- from __future__ import absolute_import from .mws import MWS, MWSError from .apis import Feeds, Finances, InboundShipments, Inventory, MerchantFulfillment,\ OffAmazonPayments, Orders, OutboundShipments, Products, Recommendations,\ Reports, Sellers, Subscriptions __all__ = [ 'Feeds', 'Finances', 'InboundShipments', 'Inventory', 'MerchantFulfillment', 'MWS', 'MWSError', 'OffAmazonPayments', 'Orders', 'OutboundShipments', 'Products', 'Recommendations', 'Reports', 'Sellers', 'Subscriptions', ]
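Note on the record above: the fix has two halves, the import line and `__all__`. Since `__all__` is what gates star-imports, listing `Subscriptions` in only one place would make `from mws import *` and `mws.Subscriptions` disagree. A minimal illustration of the mechanism (module and names invented):

# contents of a hypothetical pkg.py
__all__ = ['Feeds']          # Subscriptions deliberately omitted
Feeds = 'feeds-api'
Subscriptions = 'subscriptions-api'

# elsewhere:
#   from pkg import *    -> binds Feeds only
#   import pkg           -> pkg.Subscriptions is still reachable directly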
a91a81c16a27d72cdc41de322130e97889657561
marbaloo_mako/__init__.py
marbaloo_mako/__init__.py
import cherrypy from mako.lookup import TemplateLookup class Tool(cherrypy.Tool): _lookups = {} def __init__(self): cherrypy.Tool.__init__(self, 'before_handler', self.callable, priority=40) def callable(self, filename=None, directories=None, module_directory=None, collection_size=-1): if filename is None or directories is None: return # Find the appropriate template lookup. key = (tuple(directories), module_directory) try: lookup = self._lookups[key] except KeyError: lookup = TemplateLookup(directories=directories, module_directory=module_directory, collection_size=collection_size, input_encoding='utf8') self._lookups[key] = lookup cherrypy.request.lookup = lookup # Replace the current handler. cherrypy.request.template = template = lookup.get_template(filename) inner_handler = cherrypy.serving.request.handler def wrapper(*args, **kwargs): context = inner_handler(*args, **kwargs) response = template.render(**context) return response cherrypy.serving.request.handler = wrapper
import cherrypy from mako.lookup import TemplateLookup class Tool(cherrypy.Tool): _lookups = {} def __init__(self): cherrypy.Tool.__init__(self, 'before_handler', self.callable, priority=40) def callable(self, filename=None, directories=None, module_directory=None, collection_size=-1): if filename is None or directories is None: return # Find the appropriate template lookup. key = (tuple(directories), module_directory) try: lookup = self._lookups[key] except KeyError: lookup = TemplateLookup(directories=directories, module_directory=module_directory, collection_size=collection_size, input_encoding='utf8') self._lookups[key] = lookup cherrypy.request.lookup = lookup cherrypy.request.template = lookup.get_template(filename) # Replace the current handler. inner_handler = cherrypy.serving.request.handler def wrapper(*args, **kwargs): context = inner_handler(*args, **kwargs) response = cherrypy.request.template.render(**context) return response cherrypy.serving.request.handler = wrapper
Load template from `cherrypy.request.template` in handler.
Load template from `cherrypy.request.template` in handler.
Python
mit
marbaloo/marbaloo_mako
import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup

        # Replace the current handler.
        cherrypy.request.template = template = lookup.get_template(filename)
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
Load template from `cherrypy.request.template` in handler.
import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup
        cherrypy.request.template = lookup.get_template(filename)

        # Replace the current handler.
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = cherrypy.request.template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
<commit_before>import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup

        # Replace the current handler.
        cherrypy.request.template = template = lookup.get_template(filename)
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
<commit_msg>Load template from `cherrypy.request.template` in handler.<commit_after>
import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup
        cherrypy.request.template = lookup.get_template(filename)

        # Replace the current handler.
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = cherrypy.request.template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup

        # Replace the current handler.
        cherrypy.request.template = template = lookup.get_template(filename)
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
Load template from `cherrypy.request.template` in handler.import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup
        cherrypy.request.template = lookup.get_template(filename)

        # Replace the current handler.
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = cherrypy.request.template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
<commit_before>import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup

        # Replace the current handler.
        cherrypy.request.template = template = lookup.get_template(filename)
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
<commit_msg>Load template from `cherrypy.request.template` in handler.<commit_after>import cherrypy
from mako.lookup import TemplateLookup


class Tool(cherrypy.Tool):
    _lookups = {}

    def __init__(self):
        cherrypy.Tool.__init__(self, 'before_handler', self.callable,
                               priority=40)

    def callable(self, filename=None, directories=None,
                 module_directory=None, collection_size=-1):
        if filename is None or directories is None:
            return

        # Find the appropriate template lookup.
        key = (tuple(directories), module_directory)
        try:
            lookup = self._lookups[key]
        except KeyError:
            lookup = TemplateLookup(directories=directories,
                                    module_directory=module_directory,
                                    collection_size=collection_size,
                                    input_encoding='utf8')
            self._lookups[key] = lookup
        cherrypy.request.lookup = lookup
        cherrypy.request.template = lookup.get_template(filename)

        # Replace the current handler.
        inner_handler = cherrypy.serving.request.handler

        def wrapper(*args, **kwargs):
            context = inner_handler(*args, **kwargs)
            response = cherrypy.request.template.render(**context)
            return response
        cherrypy.serving.request.handler = wrapper
90a94b1d511aa17f167d783992fe0f874ad529c1
examples/python_interop/python_interop.py
examples/python_interop/python_interop.py
#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx):
    print("inside task f")

@legion.task
def main_task(ctx):
    print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id)
    f(ctx)
#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx, *args):
    print("inside task f%s" % (args,))

@legion.task
def main_task(ctx):
    print("inside main()")
    f(ctx, 1, "asdf", True)
Test Python support for arguments.
examples: Test Python support for arguments.
Python
apache-2.0
StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion
#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx):
    print("inside task f")

@legion.task
def main_task(ctx):
    print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id)
    f(ctx)
examples: Test Python support for arguments.
#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx, *args):
    print("inside task f%s" % (args,))

@legion.task
def main_task(ctx):
    print("inside main()")
    f(ctx, 1, "asdf", True)
<commit_before>#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx):
    print("inside task f")

@legion.task
def main_task(ctx):
    print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id)
    f(ctx)
<commit_msg>examples: Test Python support for arguments.<commit_after>
#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx, *args):
    print("inside task f%s" % (args,))

@legion.task
def main_task(ctx):
    print("inside main()")
    f(ctx, 1, "asdf", True)
#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx):
    print("inside task f")

@legion.task
def main_task(ctx):
    print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id)
    f(ctx)
examples: Test Python support for arguments.#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx, *args):
    print("inside task f%s" % (args,))

@legion.task
def main_task(ctx):
    print("inside main()")
    f(ctx, 1, "asdf", True)
<commit_before>#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx):
    print("inside task f")

@legion.task
def main_task(ctx):
    print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id)
    f(ctx)
<commit_msg>examples: Test Python support for arguments.<commit_after>#!/usr/bin/env python

# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

import legion

@legion.task
def f(ctx, *args):
    print("inside task f%s" % (args,))

@legion.task
def main_task(ctx):
    print("inside main()")
    f(ctx, 1, "asdf", True)
f1fcce8f0c2022948fb310268a7769d9c9ef04ad
runtests.py
runtests.py
#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from coverage import coverage

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(options, *test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner(verbosity=options.verbosity,
                                      pdb=options.pdb,
                                      )
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('-c', '--coverage', dest='use_coverage',
                      default=False, action='store_true',
                      help="Generate coverage report")
    parser.add_option('-v', '--verbosity', dest='verbosity',
                      default=1, type='int',
                      help="Verbosity of output")
    parser.add_option('-d', '--pdb', dest='pdb',
                      default=False, action='store_true',
                      help="Whether to drop into PDB on failure/error")
    (options, args) = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    configure(nose_args)

    if options.use_coverage:
        print 'Running tests with coverage'
        c = coverage(source=['oscar'])
        c.start()
        run_tests(options, *args)
        c.stop()
        print 'Generate HTML reports'
        c.html_report()
    else:
        run_tests(options, *args)
#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(*test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner()
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    __, args = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    nose_args.extend([
        '--with-coverage', '--cover-package=oscar',
        '--cover-html', '--cover-html-dir=htmlcov'])
    configure(nose_args)
    run_tests(*args)
Rework test runner to generate coverage stats correctly.
Rework test runner to generate coverage stats correctly. Nose seems to pick up argv automatically - which is annoying. Will need to look into at some point.
Python
bsd-3-clause
michaelkuty/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,Bogh/django-oscar,sasha0/django-oscar,okfish/django-oscar,nickpack/django-oscar,rocopartners/django-oscar,django-oscar/django-oscar,WillisXChen/django-oscar,makielab/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,jinnykoo/wuyisj.com,nfletton/django-oscar,jinnykoo/wuyisj,sasha0/django-oscar,spartonia/django-oscar,pdonadeo/django-oscar,okfish/django-oscar,eddiep1101/django-oscar,pdonadeo/django-oscar,bnprk/django-oscar,manevant/django-oscar,machtfit/django-oscar,Idematica/django-oscar,marcoantoniooliveira/labweb,MatthewWilkes/django-oscar,pdonadeo/django-oscar,solarissmoke/django-oscar,monikasulik/django-oscar,ka7eh/django-oscar,sonofatailor/django-oscar,kapari/django-oscar,eddiep1101/django-oscar,DrOctogon/unwash_ecom,monikasulik/django-oscar,spartonia/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,saadatqadri/django-oscar,mexeniz/django-oscar,manevant/django-oscar,QLGu/django-oscar,elliotthill/django-oscar,mexeniz/django-oscar,Bogh/django-oscar,jinnykoo/christmas,thechampanurag/django-oscar,QLGu/django-oscar,jinnykoo/wuyisj,Jannes123/django-oscar,marcoantoniooliveira/labweb,bnprk/django-oscar,nfletton/django-oscar,bschuon/django-oscar,ahmetdaglarbas/e-commerce,kapt/django-oscar,itbabu/django-oscar,QLGu/django-oscar,spartonia/django-oscar,nickpack/django-oscar,jmt4/django-oscar,Jannes123/django-oscar,bnprk/django-oscar,mexeniz/django-oscar,taedori81/django-oscar,bschuon/django-oscar,vovanbo/django-oscar,anentropic/django-oscar,lijoantony/django-oscar,pasqualguerrero/django-oscar,thechampanurag/django-oscar,bschuon/django-oscar,ka7eh/django-oscar,jinnykoo/christmas,machtfit/django-oscar,saadatqadri/django-oscar,itbabu/django-oscar,lijoantony/django-oscar,kapari/django-oscar,vovanbo/django-oscar,nickpack/django-oscar,amirrpp/django-oscar,kapt/django-oscar,marcoantoniooliveira/labweb,Idematica/django-oscar,jinnykoo/wuyisj,dongguangming/django-oscar,sasha0/django-oscar,sasha0/django-oscar,itbabu/django-oscar,nickpack/django-oscar,michaelkuty/django-oscar,adamend/django-oscar,anentropic/django-oscar,john-parton/django-oscar,binarydud/django-oscar,WadeYuChen/django-oscar,QLGu/django-oscar,taedori81/django-oscar,rocopartners/django-oscar,dongguangming/django-oscar,jlmadurga/django-oscar,rocopartners/django-oscar,MatthewWilkes/django-oscar,jlmadurga/django-oscar,pdonadeo/django-oscar,Bogh/django-oscar,manevant/django-oscar,john-parton/django-oscar,ka7eh/django-oscar,bschuon/django-oscar,bnprk/django-oscar,rocopartners/django-oscar,manevant/django-oscar,faratro/django-oscar,django-oscar/django-oscar,jmt4/django-oscar,Jannes123/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,okfish/django-oscar,WadeYuChen/django-oscar,pasqualguerrero/django-oscar,michaelkuty/django-oscar,amirrpp/django-oscar,anentropic/django-oscar,Jannes123/django-oscar,nfletton/django-oscar,adamend/django-oscar,vovanbo/django-oscar,makielab/django-oscar,dongguangming/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj,binarydud/django-oscar,kapt/django-oscar,jinnykoo/wuyisj.com,makielab/django-oscar,binarydud/django-oscar,pasqualguerrero/django-oscar,amirrpp/django-oscar,ahmetdaglarbas/e-commerce,jlmadurga/django-oscar,faratro/django-oscar,john-parton/django-oscar,spartonia/django-oscar,josesanch/django-oscar,WillisXChen/django-oscar,Idematica/django-oscar,solarissmoke/django-oscar,makielab/django-oscar,michaelkuty/django-oscar,machtfit/django-oscar,jinnykoo/christmas,adamend/django-oscar,Bogh/django-oscar,lijoantony/django-oscar,jmt4/django-oscar,sonofatailor/django-oscar,pasqualguerrero/django-oscar,faratro/django-oscar,nfletton/django-oscar,bnprk/django-oscar,okfish/django-oscar,WadeYuChen/django-oscar,thechampanurag/django-oscar,amirrpp/django-oscar,MatthewWilkes/django-oscar,elliotthill/django-oscar,jinnykoo/wuyisj.com,DrOctogon/unwash_ecom,eddiep1101/django-oscar,WadeYuChen/django-oscar,ademuk/django-oscar
#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from coverage import coverage

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(options, *test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner(verbosity=options.verbosity,
                                      pdb=options.pdb,
                                      )
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('-c', '--coverage', dest='use_coverage',
                      default=False, action='store_true',
                      help="Generate coverage report")
    parser.add_option('-v', '--verbosity', dest='verbosity',
                      default=1, type='int',
                      help="Verbosity of output")
    parser.add_option('-d', '--pdb', dest='pdb',
                      default=False, action='store_true',
                      help="Whether to drop into PDB on failure/error")
    (options, args) = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    configure(nose_args)

    if options.use_coverage:
        print 'Running tests with coverage'
        c = coverage(source=['oscar'])
        c.start()
        run_tests(options, *args)
        c.stop()
        print 'Generate HTML reports'
        c.html_report()
    else:
        run_tests(options, *args)
Rework test runner to generate coverage stats correctly. Nose seems to pick up argv automatically - which is annoying. Will need to look into at some point.
#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(*test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner()
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    __, args = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    nose_args.extend([
        '--with-coverage', '--cover-package=oscar',
        '--cover-html', '--cover-html-dir=htmlcov'])
    configure(nose_args)
    run_tests(*args)
<commit_before>#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from coverage import coverage

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(options, *test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner(verbosity=options.verbosity,
                                      pdb=options.pdb,
                                      )
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('-c', '--coverage', dest='use_coverage',
                      default=False, action='store_true',
                      help="Generate coverage report")
    parser.add_option('-v', '--verbosity', dest='verbosity',
                      default=1, type='int',
                      help="Verbosity of output")
    parser.add_option('-d', '--pdb', dest='pdb',
                      default=False, action='store_true',
                      help="Whether to drop into PDB on failure/error")
    (options, args) = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    configure(nose_args)

    if options.use_coverage:
        print 'Running tests with coverage'
        c = coverage(source=['oscar'])
        c.start()
        run_tests(options, *args)
        c.stop()
        print 'Generate HTML reports'
        c.html_report()
    else:
        run_tests(options, *args)
<commit_msg>Rework test runner to generate coverage stats correctly. Nose seems to pick up argv automatically - which is annoying. Will need to look into at some point.<commit_after>
#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(*test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner()
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    __, args = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    nose_args.extend([
        '--with-coverage', '--cover-package=oscar',
        '--cover-html', '--cover-html-dir=htmlcov'])
    configure(nose_args)
    run_tests(*args)
#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from coverage import coverage

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(options, *test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner(verbosity=options.verbosity,
                                      pdb=options.pdb,
                                      )
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('-c', '--coverage', dest='use_coverage',
                      default=False, action='store_true',
                      help="Generate coverage report")
    parser.add_option('-v', '--verbosity', dest='verbosity',
                      default=1, type='int',
                      help="Verbosity of output")
    parser.add_option('-d', '--pdb', dest='pdb',
                      default=False, action='store_true',
                      help="Whether to drop into PDB on failure/error")
    (options, args) = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    configure(nose_args)

    if options.use_coverage:
        print 'Running tests with coverage'
        c = coverage(source=['oscar'])
        c.start()
        run_tests(options, *args)
        c.stop()
        print 'Generate HTML reports'
        c.html_report()
    else:
        run_tests(options, *args)
Rework test runner to generate coverage stats correctly. Nose seems to pick up argv automatically - which is annoying. Will need to look into at some point.#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(*test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner()
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    __, args = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    nose_args.extend([
        '--with-coverage', '--cover-package=oscar',
        '--cover-html', '--cover-html-dir=htmlcov'])
    configure(nose_args)
    run_tests(*args)
<commit_before>#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from coverage import coverage

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(options, *test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner(verbosity=options.verbosity,
                                      pdb=options.pdb,
                                      )
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('-c', '--coverage', dest='use_coverage',
                      default=False, action='store_true',
                      help="Generate coverage report")
    parser.add_option('-v', '--verbosity', dest='verbosity',
                      default=1, type='int',
                      help="Verbosity of output")
    parser.add_option('-d', '--pdb', dest='pdb',
                      default=False, action='store_true',
                      help="Whether to drop into PDB on failure/error")
    (options, args) = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    configure(nose_args)

    if options.use_coverage:
        print 'Running tests with coverage'
        c = coverage(source=['oscar'])
        c.start()
        run_tests(options, *args)
        c.stop()
        print 'Generate HTML reports'
        c.html_report()
    else:
        run_tests(options, *args)
<commit_msg>Rework test runner to generate coverage stats correctly. Nose seems to pick up argv automatically - which is annoying. Will need to look into at some point.<commit_after>#!/usr/bin/env python
import sys
import logging
from optparse import OptionParser

from tests.config import configure

logging.disable(logging.CRITICAL)


def run_tests(*test_args):
    from django_nose import NoseTestSuiteRunner
    test_runner = NoseTestSuiteRunner()
    if not test_args:
        test_args = ['tests']
    num_failures = test_runner.run_tests(test_args)
    if num_failures:
        sys.exit(num_failures)


if __name__ == '__main__':
    parser = OptionParser()
    __, args = parser.parse_args()

    # If no args, then use 'progressive' plugin to keep the screen real estate
    # used down to a minimum. Otherwise, use the spec plugin
    nose_args = ['-s', '-x',
                 '--with-progressive' if not args else '--with-spec']
    nose_args.extend([
        '--with-coverage', '--cover-package=oscar',
        '--cover-html', '--cover-html-dir=htmlcov'])
    configure(nose_args)
    run_tests(*args)
3c833053f6da71d1eed6d1a0720a1a8cb1997de7
runtests.py
runtests.py
#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            'django.contrib.sites',  # for sitemap test
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
Include contrib.sites when running tests; needed for sitemaps in Django >= 1.2.
Include contrib.sites when running tests; needed for sitemaps in Django >= 1.2.
Python
mit
extertioner/django-localeurl,gonnado/django-localeurl,carljm/django-localeurl
#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
Include contrib.sites when running tests; needed for sitemaps in Django >= 1.2.
#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            'django.contrib.sites',  # for sitemap test
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
<commit_before>#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
<commit_msg>Include contrib.sites when running tests; needed for sitemaps in Django >= 1.2.<commit_after>
#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            'django.contrib.sites',  # for sitemap test
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
Include contrib.sites when running tests; needed for sitemaps in Django >= 1.2.#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            'django.contrib.sites',  # for sitemap test
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
<commit_before>#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
<commit_msg>Include contrib.sites when running tests; needed for sitemaps in Django >= 1.2.<commit_after>#!/usr/bin/env python

from os.path import dirname, abspath
import sys

from django.conf import settings

if not settings.configured:
    from django import VERSION
    settings_dict = dict(
        INSTALLED_APPS=(
            'localeurl',
            'django.contrib.sites',  # for sitemap test
            ),
        ROOT_URLCONF='localeurl.tests.test_urls',
        )

    if VERSION >= (1, 2):
        settings_dict["DATABASES"] = {
            "default": {
                "ENGINE": "django.db.backends.sqlite3"
                }}
    else:
        settings_dict["DATABASE_ENGINE"] = "sqlite3"

    settings.configure(**settings_dict)


def runtests(*test_args):
    if not test_args:
        test_args = ['localeurl']

    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.simple import DjangoTestSuiteRunner
        def run_tests(test_args, verbosity, interactive):
            runner = DjangoTestSuiteRunner(
                verbosity=verbosity, interactive=interactive)
            return runner.run_tests(test_args)
    except ImportError:
        # for Django versions that don't have DjangoTestSuiteRunner
        from django.test.simple import run_tests

    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
d873380a3ad382ac359ebf41f3f775f585b674f8
mopidy_gmusic/__init__.py
mopidy_gmusic/__init__.py
from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def get_backend_classes(self):
        from .actor import GMusicBackend
        return [GMusicBackend]
from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def setup(self, registry):
        from .actor import GMusicBackend
        registry.add('backend', GMusicBackend)
Use new extension setup() API
Use new extension setup() API
Python
apache-2.0
Tilley/mopidy-gmusic,jodal/mopidy-gmusic,mopidy/mopidy-gmusic,jaibot/mopidy-gmusic,hechtus/mopidy-gmusic,elrosti/mopidy-gmusic,jaapz/mopidy-gmusic
from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def get_backend_classes(self):
        from .actor import GMusicBackend
        return [GMusicBackend]
Use new extension setup() API
from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def setup(self, registry):
        from .actor import GMusicBackend
        registry.add('backend', GMusicBackend)
<commit_before>from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def get_backend_classes(self):
        from .actor import GMusicBackend
        return [GMusicBackend]
<commit_msg>Use new extension setup() API<commit_after>
from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def setup(self, registry):
        from .actor import GMusicBackend
        registry.add('backend', GMusicBackend)
from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def get_backend_classes(self):
        from .actor import GMusicBackend
        return [GMusicBackend]
Use new extension setup() APIfrom __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def setup(self, registry):
        from .actor import GMusicBackend
        registry.add('backend', GMusicBackend)
<commit_before>from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def get_backend_classes(self):
        from .actor import GMusicBackend
        return [GMusicBackend]
<commit_msg>Use new extension setup() API<commit_after>from __future__ import unicode_literals

import os

from mopidy import config, ext


__version__ = '0.2.2'


class GMusicExtension(ext.Extension):
    dist_name = 'Mopidy-GMusic'
    ext_name = 'gmusic'
    version = __version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(GMusicExtension, self).get_config_schema()
        schema['username'] = config.String()
        schema['password'] = config.Secret()
        schema['deviceid'] = config.String(optional=True)
        return schema

    def setup(self, registry):
        from .actor import GMusicBackend
        registry.add('backend', GMusicBackend)
07135d2e24eab7855c55c9213b0653dec23ccbcf
authomatic/__init__.py
authomatic/__init__.py
# -*- coding: utf-8 -*-
"""
This is the only interface that you should ever need to get a **user**
logged in, get **his/her** info and credentials, deserialize the credentials
and access **his/her protected resources**.

.. autosummary::
    :nosignatures:

    authomatic.setup
    authomatic.login
    authomatic.provider_id
    authomatic.access
    authomatic.async_access
    authomatic.credentials
    authomatic.request_elements
    authomatic.backend

"""

import six
from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
# -*- coding: utf-8 -*-
"""
This is the only interface that you should ever need to get a **user**
logged in, get **his/her** info and credentials, deserialize the credentials
and access **his/her protected resources**.

.. autosummary::
    :nosignatures:

    authomatic.setup
    authomatic.login
    authomatic.provider_id
    authomatic.access
    authomatic.async_access
    authomatic.credentials
    authomatic.request_elements
    authomatic.backend

"""

from . import six
from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
Correct six local import for python3 compatibility
Correct six local import for python3 compatibility
Python
mit
liorshahverdi/authomatic,authomatic/authomatic,kshitizanand/authomatic,artitj/authomatic,farcaz/authomatic,liorshahverdi/authomatic,scorphus/authomatic,leadbrick/authomatic,erasanimoulika/authomatic,c24b/authomatic,authomatic/authomatic,erasanimoulika/authomatic,dougchestnut/authomatic,scorphus/authomatic,vivek8943/authomatic,liorshahverdi/authomatic,kshitizanand/authomatic,daherk2/authomatic,farcaz/authomatic,garg91824/authomatic,vivek8943/authomatic,farcaz/authomatic,artitj/authomatic,peterhudec/authomatic,erasanimoulika/authomatic,peterhudec/authomatic,scorphus/authomatic,leadbrick/authomatic,dougchestnut/authomatic,leadbrick/authomatic,peterhudec/authomatic,kshitizanand/authomatic,daherk2/authomatic,garg91824/authomatic,farcaz/authomatic,scorphus/authomatic,jasco/authomatic,c24b/authomatic,garg91824/authomatic,dougchestnut/authomatic,muravjov/authomatic,farcaz/authomatic,vivek8943/authomatic,jasco/authomatic,muravjov/authomatic,dougchestnut/authomatic,muravjov/authomatic,daherk2/authomatic,c24b/authomatic,leadbrick/authomatic,dougchestnut/authomatic,leadbrick/authomatic,scorphus/authomatic,artitj/authomatic,jasco/authomatic,authomatic/authomatic
# -*- coding: utf-8 -*-
"""
This is the only interface that you should ever need to get a **user**
logged in, get **his/her** info and credentials, deserialize the credentials
and access **his/her protected resources**.

.. autosummary::
    :nosignatures:

    authomatic.setup
    authomatic.login
    authomatic.provider_id
    authomatic.access
    authomatic.async_access
    authomatic.credentials
    authomatic.request_elements
    authomatic.backend

"""

import six
from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
Correct six local import for python3 compatibility
# -*- coding: utf-8 -*-
"""
This is the only interface that you should ever need to get a **user**
logged in, get **his/her** info and credentials, deserialize the credentials
and access **his/her protected resources**.

.. autosummary::
    :nosignatures:

    authomatic.setup
    authomatic.login
    authomatic.provider_id
    authomatic.access
    authomatic.async_access
    authomatic.credentials
    authomatic.request_elements
    authomatic.backend

"""

from . import six
from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
<commit_before># -*- coding: utf-8 -*-
"""
This is the only interface that you should ever need to get a **user**
logged in, get **his/her** info and credentials, deserialize the credentials
and access **his/her protected resources**.

.. autosummary::
    :nosignatures:

    authomatic.setup
    authomatic.login
    authomatic.provider_id
    authomatic.access
    authomatic.async_access
    authomatic.credentials
    authomatic.request_elements
    authomatic.backend

"""

import six
from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
<commit_msg>Correct six local import for python3 compatibility<commit_after>
# -*- coding: utf-8 -*- """ This is the only interface that you should ever need to get a **user** logged in, get **his/her** info and credentials, deserialize the credentials and access **his/her protected resources**. .. autosummary:: :nosignatures: authomatic.setup authomatic.login authomatic.provider_id authomatic.access authomatic.async_access authomatic.credentials authomatic.request_elements authomatic.backend """ from . import six from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
# -*- coding: utf-8 -*- """ This is the only interface that you should ever need to get a **user** logged in, get **his/her** info and credentials, deserialize the credentials and access **his/her protected resources**. .. autosummary:: :nosignatures: authomatic.setup authomatic.login authomatic.provider_id authomatic.access authomatic.async_access authomatic.credentials authomatic.request_elements authomatic.backend """ import six from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend Correct six local import for python3 compatibility# -*- coding: utf-8 -*- """ This is the only interface that you should ever need to get a **user** logged in, get **his/her** info and credentials, deserialize the credentials and access **his/her protected resources**. .. autosummary:: :nosignatures: authomatic.setup authomatic.login authomatic.provider_id authomatic.access authomatic.async_access authomatic.credentials authomatic.request_elements authomatic.backend """ from . import six from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
<commit_before># -*- coding: utf-8 -*- """ This is the only interface that you should ever need to get a **user** logged in, get **his/her** info and credentials, deserialize the credentials and access **his/her protected resources**. .. autosummary:: :nosignatures: authomatic.setup authomatic.login authomatic.provider_id authomatic.access authomatic.async_access authomatic.credentials authomatic.request_elements authomatic.backend """ import six from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend <commit_msg>Correct six local import for python3 compatibility<commit_after># -*- coding: utf-8 -*- """ This is the only interface that you should ever need to get a **user** logged in, get **his/her** info and credentials, deserialize the credentials and access **his/her protected resources**. .. autosummary:: :nosignatures: authomatic.setup authomatic.login authomatic.provider_id authomatic.access authomatic.async_access authomatic.credentials authomatic.request_elements authomatic.backend """ from . import six from .core import Authomatic, setup, login, provider_id, access, async_access, credentials, request_elements, backend
13f4373fc415faba717033f0e8b87a7c5cd83033
slackclient/_slackrequest.py
slackclient/_slackrequest.py
import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token return requests.post(url, data=post_data)
import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token files = {'file': post_data.pop('file')} if 'file' in post_data else None return requests.post(url, data=post_data, files=files)
Add support for files.upload API call.
Add support for files.upload API call. Closes #64, #88.
Python
mit
slackapi/python-slackclient,slackhq/python-slackclient
import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token return requests.post(url, data=post_data) Add support for files.upload API call. Closes #64, #88.
import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token files = {'file': post_data.pop('file')} if 'file' in post_data else None return requests.post(url, data=post_data, files=files)
<commit_before>import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token return requests.post(url, data=post_data) <commit_msg>Add support for files.upload API call. Closes #64, #88.<commit_after>
import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token files = {'file': post_data.pop('file')} if 'file' in post_data else None return requests.post(url, data=post_data, files=files)
import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token return requests.post(url, data=post_data) Add support for files.upload API call. Closes #64, #88.import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token files = {'file': post_data.pop('file')} if 'file' in post_data else None return requests.post(url, data=post_data, files=files)
<commit_before>import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token return requests.post(url, data=post_data) <commit_msg>Add support for files.upload API call. Closes #64, #88.<commit_after>import json import requests import six class SlackRequest(object): @staticmethod def do(token, request="?", post_data=None, domain="slack.com"): post_data = post_data or {} for k, v in six.iteritems(post_data): if not isinstance(v, six.string_types): post_data[k] = json.dumps(v) url = 'https://{0}/api/{1}'.format(domain, request) post_data['token'] = token files = {'file': post_data.pop('file')} if 'file' in post_data else None return requests.post(url, data=post_data, files=files)
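A hedged usage sketch of what the patched SlackRequest.do enables; the token, channel, and filename are placeholders. Passing the file contents as a string keeps the value clear of the json.dumps pass, after which do() pops it into requests' files= argument as a multipart upload.

from slackclient._slackrequest import SlackRequest

# Placeholders throughout; a real call needs network access and a valid token.
file_contents = open('report.txt').read()  # a str is skipped by the json.dumps loop
response = SlackRequest.do(
    'xoxb-not-a-real-token',
    'files.upload',
    {'file': file_contents, 'channels': 'C12345678'},
)
print(response.status_code)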
04640beab352f8797d68c33f940e920d2ed9ca6b
nolang/objects/list.py
nolang/objects/list.py
from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) == space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value
from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) is space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value
Use is instead of == for types.
Use is instead of == for types.
Python
mit
fijal/quill
from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) == space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value Use is instead of == for types.
from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) is space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value
<commit_before>from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) == space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value <commit_msg>Use is instead of == for types.<commit_after>
from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) is space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value
from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) == space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value Use is instead of == for types.from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) is space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value
<commit_before>from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) == space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value <commit_msg>Use is instead of == for types.<commit_after>from nolang.error import AppError from nolang.objects.root import W_Root class W_ListObject(W_Root): def __init__(self, w_items): self._w_items = w_items def str(self, space): return '[' + ', '.join([space.str(i) for i in self._w_items]) + ']' def len(self, space): return len(self._w_items) def unwrap_index(self, space, w_index): try: i = space.int_w(w_index) except AppError as ae: if space.type(ae.w_exception) is space.w_typeerror: raise space.apperr(space.w_typeerror, 'list index must be int') raise if i < 0 or i >= len(self._w_items): raise space.apperr(space.w_indexerror, 'list index out of range') return i def getitem(self, space, w_index): return self._w_items[self.unwrap_index(space, w_index)] def setitem(self, space, w_index, w_value): self._w_items[self.unwrap_index(space, w_index)] = w_value
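The rationale behind this one-token diff, demonstrated outside the interpreter with stand-in singletons for space.w_typeerror and friends: identity (is) is exact and cannot be intercepted, while == dispatches to user-definable equality.

w_typeerror = object()   # stand-in for the interpreter's singleton type object
w_indexerror = object()

caught = w_typeerror
assert caught is w_typeerror        # identity: same object
assert caught is not w_indexerror

class Sneaky(object):
    def __eq__(self, other):
        return True                 # equality can lie; identity cannot

assert Sneaky() == w_typeerror      # == is fooled by the override
assert Sneaky() is not w_typeerror  # is still tells the truth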
0676485054c01abb41b95901c1af0af63fdcb650
AFQ/utils/volume.py
AFQ/utils/volume.py
import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma)).astype(float))
import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5, truncate=2): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. truncate : float The truncation for the Gaussian. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma, truncate=truncate)).astype(float))
Truncate ROI-smoothing Gaussian at 2 STD per default.
Truncate ROI-smoothing Gaussian at 2 STD per default.
Python
bsd-2-clause
yeatmanlab/pyAFQ,arokem/pyAFQ
import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma)).astype(float)) Truncate ROI-smoothing Gaussian at 2 STD per default.
import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5, truncate=2): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. truncate : float The truncation for the Gaussian. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma, truncate=truncate)).astype(float))
<commit_before>import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma)).astype(float)) <commit_msg>Truncate ROI-smoothing Gaussian at 2 STD per default.<commit_after>
import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5, truncate=2): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. truncate : float The truncation for the Gaussian. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma, truncate=truncate)).astype(float))
import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma)).astype(float)) Truncate ROI-smoothing Gaussian at 2 STD per default.import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5, truncate=2): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. truncate : float The truncation for the Gaussian. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma, truncate=truncate)).astype(float))
<commit_before>import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma)).astype(float)) <commit_msg>Truncate ROI-smoothing Gaussian at 2 STD per default.<commit_after>import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5, truncate=2): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. truncate : float The truncation for the Gaussian. Returns ------- ROI after dilation and hole-filling """ return ndim.binary_fill_holes( ndim.binary_dilation(gaussian(roi, sigma=sigma, truncate=truncate)).astype(float))
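Context for the new parameter, sketched with SciPy's gaussian_filter, which skimage.filters.gaussian wraps: truncate caps the kernel radius at int(truncate * sigma + 0.5) samples per axis, so truncate=2 with sigma=0.5 smooths only the immediate neighbours and keeps the dilated ROI tight.

import numpy as np
from scipy.ndimage import gaussian_filter

roi = np.zeros((9, 9, 9))
roi[4, 4, 4] = 1.0  # a single-voxel impulse

wide = gaussian_filter(roi, sigma=0.5)               # default truncate=4.0 -> radius 2
tight = gaussian_filter(roi, sigma=0.5, truncate=2)  # radius int(2*0.5+0.5) = 1

# The truncated kernel spreads the impulse over far fewer voxels.
print(np.count_nonzero(wide), np.count_nonzero(tight))  # 125 27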
11528044c054ac8f1d65c1b64c2e76fbe22dad20
lpthw/ex24.py
lpthw/ex24.py
print "Let's practice everything." print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
print "Let's practice everything." print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
Change single quote to double quote to make it run -.-
Change single quote to double quote to make it run -.-
Python
mit
jaredmanning/learning
print "Let's practice everything." print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point) Change single quote to double quote to make it run -.-
print "Let's practice everything." print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
<commit_before>print "Let's practice everything." print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point) <commit_msg>Change single quote to double quote to make it run -.-<commit_after>
print "Let's practice everything." print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
print "Let's practice everything." print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point) Change single quote to double quote to make it run -.-print "Let's practice everything." print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
<commit_before>print "Let's practice everything." print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point) <commit_msg>Change single quote to double quote to make it run -.-<commit_after>print "Let's practice everything." print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs." poem = """ \t the lovely world wtih logic so firmly planted cannot discern \n the needs of love nor comprehend passion from intuition and requires an explanation \n\t\twhere there is none. """ print "--------------" print poem print "--------------" five = 10 - 2 + 3 - 6 print "This should be five: %s" % five def secret_formula(started): """ This is not the Krabby Patty Secret Formula (tm) """ jelly_beans = started * 500 jars = jelly_beans / 1000 crates = jars / 100 return jelly_beans, jars, crates start_point = 10000 beans, jars, crates = secret_formula(start_point) print "With a starting point of %d" % start_point print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates) start_point = start_point / 10 print "We can also do that this way:" # This part is pretty darn cool. \/ print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
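The failure mode this commit removes, isolated; the snippet uses Python 3's print function so it runs standalone. A string literal must close with the same quote character it opened with, otherwise the parser never finds the end of the line and raises SyntaxError before anything executes.

# Broken: opens with ' but "closes" with " -- a parse-time SyntaxError:
#   print 'You\'d need to know \'bout escapes with \\ ... and \t tabs."

# Matched double quotes; the apostrophes then need no escaping at all.
print("You'd need to know 'bout escapes with \\ that do \n newlines and \t tabs.")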
a24fe6cb58439d295455116574463ebdaf621f2c
mgsv_names.py
mgsv_names.py
import random, os global adjectives, animals, rares with open(os.path.join(os.path.dirname(__file__), 'adjectives.txt')) as f: adjectives = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'animals.txt')) as f: animals = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'rares.txt')) as f: rares = f.readlines() uncommons = { # Adjectives: 'master': 'miller', 'raging': 'bull', 'hidden': 'dragon', 'humming': 'bird', 'spicy': 'sandworm', # Animals: 'ocelot': 'revolver', 'lion': 'snooping', 'tiger': 'crouching', 'hippo': 'hungry', 'falcon': 'punching', } def generate_name(): adj = random.choice(adjectives).strip() anim = random.choice(animals).strip() r = random.random() if r < 0.001 or r >= 0.999: return random.choice(rares).strip() elif r < 0.3 and adj in uncommons: return ' '.join((adj, uncommons[adj])) elif r >= 0.7 and anim in uncommons: return ' '.join((uncommons[anim], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name())
from __future__ import unicode_literals, print_function import sqlite3, os, random _select = 'select {} from {} order by random() limit 1' _uncommon_select = 'select value from uncommons where key=?' def generate_name(): conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db')) cursor = conn.cursor() adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0] anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0] rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0] uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone() uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone() conn.close() r = random.random() if r < 0.001 or r >= 0.999: return rare elif r < 0.3 and uncommon_anim is not None: return ' '.join((adj, uncommon_anim[0])) elif r >= 0.7 and uncommon_adj is not None: return ' '.join((uncommon_adj[0], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name())
Replace text files with database access.
Replace text files with database access.
Python
unlicense
rotated8/mgsv_names
import random, os global adjectives, animals, rares with open(os.path.join(os.path.dirname(__file__), 'adjectives.txt')) as f: adjectives = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'animals.txt')) as f: animals = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'rares.txt')) as f: rares = f.readlines() uncommons = { # Adjectives: 'master': 'miller', 'raging': 'bull', 'hidden': 'dragon', 'humming': 'bird', 'spicy': 'sandworm', # Animals: 'ocelot': 'revolver', 'lion': 'snooping', 'tiger': 'crouching', 'hippo': 'hungry', 'falcon': 'punching', } def generate_name(): adj = random.choice(adjectives).strip() anim = random.choice(animals).strip() r = random.random() if r < 0.001 or r >= 0.999: return random.choice(rares).strip() elif r < 0.3 and adj in uncommons: return ' '.join((adj, uncommons[adj])) elif r >= 0.7 and anim in uncommons: return ' '.join((uncommons[anim], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name()) Replace text files with database access.
from __future__ import unicode_literals, print_function import sqlite3, os, random _select = 'select {} from {} order by random() limit 1' _uncommon_select = 'select value from uncommons where key=?' def generate_name(): conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db')) cursor = conn.cursor() adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0] anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0] rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0] uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone() uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone() conn.close() r = random.random() if r < 0.001 or r >= 0.999: return rare elif r < 0.3 and uncommon_anim is not None: return ' '.join((adj, uncommon_anim[0])) elif r >= 0.7 and uncommon_adj is not None: return ' '.join((uncommon_adj[0], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name())
<commit_before>import random, os global adjectives, animals, rares with open(os.path.join(os.path.dirname(__file__), 'adjectives.txt')) as f: adjectives = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'animals.txt')) as f: animals = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'rares.txt')) as f: rares = f.readlines() uncommons = { # Adjectives: 'master': 'miller', 'raging': 'bull', 'hidden': 'dragon', 'humming': 'bird', 'spicy': 'sandworm', # Animals: 'ocelot': 'revolver', 'lion': 'snooping', 'tiger': 'crouching', 'hippo': 'hungry', 'falcon': 'punching', } def generate_name(): adj = random.choice(adjectives).strip() anim = random.choice(animals).strip() r = random.random() if r < 0.001 or r >= 0.999: return random.choice(rares).strip() elif r < 0.3 and adj in uncommons: return ' '.join((adj, uncommons[adj])) elif r >= 0.7 and anim in uncommons: return ' '.join((uncommons[anim], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name()) <commit_msg>Replace text files with database access.<commit_after>
from __future__ import unicode_literals, print_function import sqlite3, os, random _select = 'select {} from {} order by random() limit 1' _uncommon_select = 'select value from uncommons where key=?' def generate_name(): conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db')) cursor = conn.cursor() adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0] anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0] rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0] uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone() uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone() conn.close() r = random.random() if r < 0.001 or r >= 0.999: return rare elif r < 0.3 and uncommon_anim is not None: return ' '.join((adj, uncommon_anim[0])) elif r >= 0.7 and uncommon_adj is not None: return ' '.join((uncommon_adj[0], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name())
import random, os global adjectives, animals, rares with open(os.path.join(os.path.dirname(__file__), 'adjectives.txt')) as f: adjectives = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'animals.txt')) as f: animals = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'rares.txt')) as f: rares = f.readlines() uncommons = { # Adjectives: 'master': 'miller', 'raging': 'bull', 'hidden': 'dragon', 'humming': 'bird', 'spicy': 'sandworm', # Animals: 'ocelot': 'revolver', 'lion': 'snooping', 'tiger': 'crouching', 'hippo': 'hungry', 'falcon': 'punching', } def generate_name(): adj = random.choice(adjectives).strip() anim = random.choice(animals).strip() r = random.random() if r < 0.001 or r >= 0.999: return random.choice(rares).strip() elif r < 0.3 and adj in uncommons: return ' '.join((adj, uncommons[adj])) elif r >= 0.7 and anim in uncommons: return ' '.join((uncommons[anim], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name()) Replace text files with database access.from __future__ import unicode_literals, print_function import sqlite3, os, random _select = 'select {} from {} order by random() limit 1' _uncommon_select = 'select value from uncommons where key=?' def generate_name(): conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db')) cursor = conn.cursor() adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0] anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0] rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0] uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone() uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone() conn.close() r = random.random() if r < 0.001 or r >= 0.999: return rare elif r < 0.3 and uncommon_anim is not None: return ' '.join((adj, uncommon_anim[0])) elif r >= 0.7 and uncommon_adj is not None: return ' '.join((uncommon_adj[0], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name())
<commit_before>import random, os global adjectives, animals, rares with open(os.path.join(os.path.dirname(__file__), 'adjectives.txt')) as f: adjectives = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'animals.txt')) as f: animals = f.readlines() with open(os.path.join(os.path.dirname(__file__), 'rares.txt')) as f: rares = f.readlines() uncommons = { # Adjectives: 'master': 'miller', 'raging': 'bull', 'hidden': 'dragon', 'humming': 'bird', 'spicy': 'sandworm', # Animals: 'ocelot': 'revolver', 'lion': 'snooping', 'tiger': 'crouching', 'hippo': 'hungry', 'falcon': 'punching', } def generate_name(): adj = random.choice(adjectives).strip() anim = random.choice(animals).strip() r = random.random() if r < 0.001 or r >= 0.999: return random.choice(rares).strip() elif r < 0.3 and adj in uncommons: return ' '.join((adj, uncommons[adj])) elif r >= 0.7 and anim in uncommons: return ' '.join((uncommons[anim], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name()) <commit_msg>Replace text files with database access.<commit_after>from __future__ import unicode_literals, print_function import sqlite3, os, random _select = 'select {} from {} order by random() limit 1' _uncommon_select = 'select value from uncommons where key=?' def generate_name(): conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db')) cursor = conn.cursor() adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0] anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0] rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0] uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone() uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone() conn.close() r = random.random() if r < 0.001 or r >= 0.999: return rare elif r < 0.3 and uncommon_anim is not None: return ' '.join((adj, uncommon_anim[0])) elif r >= 0.7 and uncommon_adj is not None: return ' '.join((uncommon_adj[0], anim)) return ' '.join((adj, anim)) if __name__ == '__main__': print(generate_name())
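A hedged sketch of the query pattern the rewrite leans on; table and column names come from the module's _select templates, and the database file is assumed to sit alongside the caller. ORDER BY RANDOM() LIMIT 1 delegates the sampling to SQLite, so the word lists never have to be read into Python.

import sqlite3

conn = sqlite3.connect('names.db')  # assumed bundled database
cur = conn.cursor()

adjective = cur.execute(
    'select adjective from adjectives order by random() limit 1').fetchone()[0]
animal = cur.execute(
    'select animal from animals order by random() limit 1').fetchone()[0]
conn.close()

print('{} {}'.format(adjective, animal))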
33903a72a48a6d36792cec0f1fb3a6999c04b486
blendergltf/exporters/base.py
blendergltf/exporters/base.py
import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): return { k: v.to_list() if hasattr(v, 'to_list') else v for k, v in blender_data.items() if k not in _IGNORED_CUSTOM_PROPS and _is_serializable(v) } @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {}
import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): custom_props = { key: value.to_list() if hasattr(value, 'to_list') else value for key, value in blender_data.items() if key not in _IGNORED_CUSTOM_PROPS } custom_props = { key: value for key, value in custom_props.items() if _is_serializable(value) } return custom_props @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {}
Convert custom properties before checking for serializability
Convert custom properties before checking for serializability
Python
apache-2.0
Kupoman/blendergltf
import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): return { k: v.to_list() if hasattr(v, 'to_list') else v for k, v in blender_data.items() if k not in _IGNORED_CUSTOM_PROPS and _is_serializable(v) } @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {} Convert custom properties before checking for serializability
import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): custom_props = { key: value.to_list() if hasattr(value, 'to_list') else value for key, value in blender_data.items() if key not in _IGNORED_CUSTOM_PROPS } custom_props = { key: value for key, value in custom_props.items() if _is_serializable(value) } return custom_props @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {}
<commit_before>import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): return { k: v.to_list() if hasattr(v, 'to_list') else v for k, v in blender_data.items() if k not in _IGNORED_CUSTOM_PROPS and _is_serializable(v) } @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {} <commit_msg>Convert custom properties before checking for serializability<commit_after>
import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): custom_props = { key: value.to_list() if hasattr(value, 'to_list') else value for key, value in blender_data.items() if key not in _IGNORED_CUSTOM_PROPS } custom_props = { key: value for key, value in custom_props.items() if _is_serializable(value) } return custom_props @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {}
import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): return { k: v.to_list() if hasattr(v, 'to_list') else v for k, v in blender_data.items() if k not in _IGNORED_CUSTOM_PROPS and _is_serializable(v) } @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {} Convert custom properties before checking for serializabilityimport json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): custom_props = { key: value.to_list() if hasattr(value, 'to_list') else value for key, value in blender_data.items() if key not in _IGNORED_CUSTOM_PROPS } custom_props = { key: value for key, value in custom_props.items() if _is_serializable(value) } return custom_props @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {}
<commit_before>import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): return { k: v.to_list() if hasattr(v, 'to_list') else v for k, v in blender_data.items() if k not in _IGNORED_CUSTOM_PROPS and _is_serializable(v) } @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {} <commit_msg>Convert custom properties before checking for serializability<commit_after>import json def _is_serializable(value): try: json.dumps(value) return True except TypeError: return False _IGNORED_CUSTOM_PROPS = [ '_RNA_UI', 'cycles', 'cycles_visibility', ] # pylint: disable=unused-argument class BaseExporter: gltf_key = '' blender_key = '' @classmethod def get_custom_properties(cls, blender_data): custom_props = { key: value.to_list() if hasattr(value, 'to_list') else value for key, value in blender_data.items() if key not in _IGNORED_CUSTOM_PROPS } custom_props = { key: value for key, value in custom_props.items() if _is_serializable(value) } return custom_props @classmethod def check(cls, state, blender_data): return True @classmethod def default(cls, state, blender_data): return { 'name': blender_data.name } @classmethod def export(cls, state, blender_data): return {}
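Why the two-pass rewrite matters, shown in isolation; the array class below is a stand-in for Blender's IDPropertyArray. The raw wrapper fails json.dumps, but its to_list() form succeeds, so the conversion has to run before the serializability filter or list-valued properties are silently dropped.

import json

class FakeIDPropertyArray(object):  # stand-in for Blender's wrapper type
    def __init__(self, values):
        self._values = list(values)
    def to_list(self):
        return self._values

def is_serializable(value):
    try:
        json.dumps(value)
        return True
    except TypeError:
        return False

props = {'scale': FakeIDPropertyArray([1.0, 2.0, 3.0]), 'label': 'cube'}

# Old order: the filter sees the raw wrapper and drops 'scale'.
kept_old = {k: v for k, v in props.items() if is_serializable(v)}

# New order: convert first, then filter -- 'scale' survives.
converted = {k: v.to_list() if hasattr(v, 'to_list') else v
             for k, v in props.items()}
kept_new = {k: v for k, v in converted.items() if is_serializable(v)}

print(sorted(kept_old), sorted(kept_new))  # ['label'] ['label', 'scale']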
fc97832d0d96017ac71da125c3a7f29caceface6
app/mod_budget/model.py
app/mod_budget/model.py
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True)
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category)
Make category an optional field for an entry.
Make category an optional field for an entry. Income entries should not have a category. So the field should be made optional.
Python
mit
Zillolo/mana-vault
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) Make category an optional field for an entry. Income entries should not have a category. So the field should be made optional.
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category)
<commit_before>from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) <commit_msg>Make category an optional field for an entry. Income entries should not have a category. So the field should be made optional.<commit_after>
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category)
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) Make category an optional field for an entry. Income entries should not have a category. So the field should be made optional.from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category)
<commit_before>from app import db
from app.mod_auth.model import User


class Category(db.Document):
    # The name of the category.
    name = db.StringField(required = True)


class Entry(db.Document):
    # The amount of the entry.
    amount = db.DecimalField(precision = 2, required = True)
    # A short description for the entry.
    description = db.StringField(required = True)
    # The owner of the entry.
    # Should the owner be deleted, we also want to delete all of his entries.
    owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True)
    # The category of this entry.
    category = db.ReferenceField(Category, required = True)
<commit_msg>Make category an optional field for an entry.

Income entries should not have a category. So the field should be made optional.<commit_after>from app import db
from app.mod_auth.model import User


class Category(db.Document):
    # The name of the category.
    name = db.StringField(required = True)


class Entry(db.Document):
    # The amount of the entry.
    amount = db.DecimalField(precision = 2, required = True)
    # A short description for the entry.
    description = db.StringField(required = True)
    # The owner of the entry.
    # Should the owner be deleted, we also want to delete all of his entries.
    owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True)
    # The category of this entry.
    category = db.ReferenceField(Category)
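A minimal sketch of the pattern this record captures: a MongoEngine ReferenceField becomes optional once required = True is dropped. Everything below is illustrative; the plain mongoengine import and the trimmed model are stand-ins for the project's actual flask-mongoengine setup, and validate() is used so the example needs no running database.

import mongoengine as db

class Category(db.Document):
    name = db.StringField(required=True)

class Entry(db.Document):
    amount = db.DecimalField(precision=2, required=True)
    description = db.StringField(required=True)
    # Optional reference: income entries simply leave it unset.
    category = db.ReferenceField(Category)

# No category supplied; validation passes because the field is optional.
income = Entry(amount=1200, description='Salary')
income.validate()
assert income.category is None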
094cb428316ac0fceb0178d5a507e746550f4509
bin/task_usage_index.py
bin/task_usage_index.py
#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path):
    count = 0
    index = []
    for path in sorted(glob.glob('{}/**/*.sqlite3'.format(data_path))):
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'count': int(data[i, 2]),
            })
            count += 1
            if count % 10000 == 0:
                print('Processed: {}'.format(count))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path, report_each=10000):
    print('Looking for data in "{}"...'.format(data_path))
    paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
    print('Processing {} databases...'.format(len(paths)))
    index = []
    count = 0
    for path in paths:
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'length': int(data[i, 2]),
            })
            count += 1
            if count % report_each == 0:
                print('Processed: {}'.format(count))
    print('Saving into "{}"...'.format(index_path))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
Print progress from the indexing script
Print progress from the indexing script
Python
mit
learning-on-chip/google-cluster-prediction
#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path):
    count = 0
    index = []
    for path in sorted(glob.glob('{}/**/*.sqlite3'.format(data_path))):
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'count': int(data[i, 2]),
            })
            count += 1
            if count % 10000 == 0:
                print('Processed: {}'.format(count))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])

Print progress from the indexing script
#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path, report_each=10000):
    print('Looking for data in "{}"...'.format(data_path))
    paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
    print('Processing {} databases...'.format(len(paths)))
    index = []
    count = 0
    for path in paths:
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'length': int(data[i, 2]),
            })
            count += 1
            if count % report_each == 0:
                print('Processed: {}'.format(count))
    print('Saving into "{}"...'.format(index_path))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
<commit_before>#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path):
    count = 0
    index = []
    for path in sorted(glob.glob('{}/**/*.sqlite3'.format(data_path))):
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'count': int(data[i, 2]),
            })
            count += 1
            if count % 10000 == 0:
                print('Processed: {}'.format(count))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
<commit_msg>Print progress from the indexing script<commit_after>
#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path, report_each=10000):
    print('Looking for data in "{}"...'.format(data_path))
    paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
    print('Processing {} databases...'.format(len(paths)))
    index = []
    count = 0
    for path in paths:
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'length': int(data[i, 2]),
            })
            count += 1
            if count % report_each == 0:
                print('Processed: {}'.format(count))
    print('Saving into "{}"...'.format(index_path))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path):
    count = 0
    index = []
    for path in sorted(glob.glob('{}/**/*.sqlite3'.format(data_path))):
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'count': int(data[i, 2]),
            })
            count += 1
            if count % 10000 == 0:
                print('Processed: {}'.format(count))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])

Print progress from the indexing script

#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path, report_each=10000):
    print('Looking for data in "{}"...'.format(data_path))
    paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
    print('Processing {} databases...'.format(len(paths)))
    index = []
    count = 0
    for path in paths:
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'length': int(data[i, 2]),
            })
            count += 1
            if count % report_each == 0:
                print('Processed: {}'.format(count))
    print('Saving into "{}"...'.format(index_path))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
<commit_before>#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path):
    count = 0
    index = []
    for path in sorted(glob.glob('{}/**/*.sqlite3'.format(data_path))):
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'count': int(data[i, 2]),
            })
            count += 1
            if count % 10000 == 0:
                print('Processed: {}'.format(count))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
<commit_msg>Print progress from the indexing script<commit_after>#!/usr/bin/env python3

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))

import glob, json
import numpy as np
import task_usage


def main(data_path, index_path, report_each=10000):
    print('Looking for data in "{}"...'.format(data_path))
    paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
    print('Processing {} databases...'.format(len(paths)))
    index = []
    count = 0
    for path in paths:
        data = task_usage.count_job_task_samples(path)
        for i in range(data.shape[0]):
            index.append({
                'path': path,
                'job': int(data[i, 0]),
                'task': int(data[i, 1]),
                'length': int(data[i, 2]),
            })
            count += 1
            if count % report_each == 0:
                print('Processed: {}'.format(count))
    print('Saving into "{}"...'.format(index_path))
    with open(index_path, 'w') as file:
        json.dump({'index': index}, file, indent=4)


if __name__ == '__main__':
    assert(len(sys.argv) == 3)
    main(sys.argv[1], sys.argv[2])
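One caveat worth flagging before reusing this script's glob pattern: glob.glob only expands '**' across nested directories when called with recursive=True (Python 3.5+); without that flag the pattern matches a single directory level. A small, self-contained sketch of the same find-then-report-progress idiom follows; the 'data' directory and the per-path work are hypothetical stand-ins.

import glob

def index_paths(data_path, report_each=10000):
    # recursive=True makes '**' descend into subdirectories.
    paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path), recursive=True))
    print('Processing {} databases...'.format(len(paths)))
    count = 0
    for path in paths:
        # ... per-database work would go here ...
        count += 1
        if count % report_each == 0:  # cheap periodic progress report
            print('Processed: {}'.format(count))
    print('Done: {} databases'.format(count))

index_paths('data')  # hypothetical data directory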
9ca46da9d0cf8b5f4b4a6e9234d7089665df5e8b
chef/tests/__init__.py
chef/tests/__init__.py
import os

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()
import os
import random

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()

    def random(self, length=8, alphabet='0123456789abcdef'):
        return ''.join(random.choice(alphabet) for _ in xrange(length))
Add a method to generate random names for testing.
Add a method to generate random names for testing.
Python
apache-2.0
jarosser06/pychef,dipakvwarade/pychef,coderanger/pychef,jarosser06/pychef,Scalr/pychef,dipakvwarade/pychef,cread/pychef,Scalr/pychef,coderanger/pychef,cread/pychef
import os

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()

Add a method to generate random names for testing.
import os
import random

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()

    def random(self, length=8, alphabet='0123456789abcdef'):
        return ''.join(random.choice(alphabet) for _ in xrange(length))
<commit_before>import os

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()
<commit_msg>Add a method to generate random names for testing.<commit_after>
import os
import random

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()

    def random(self, length=8, alphabet='0123456789abcdef'):
        return ''.join(random.choice(alphabet) for _ in xrange(length))
import os

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()

Add a method to generate random names for testing.

import os
import random

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()

    def random(self, length=8, alphabet='0123456789abcdef'):
        return ''.join(random.choice(alphabet) for _ in xrange(length))
<commit_before>import os

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()
<commit_msg>Add a method to generate random names for testing.<commit_after>import os
import random

from unittest2 import TestCase

from chef.api import ChefAPI

TEST_ROOT = os.path.dirname(os.path.abspath(__file__))


def test_chef_api():
    return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')


class ChefTestCase(TestCase):
    """Base class for Chef unittests."""

    def setUp(self):
        super(ChefTestCase, self).setUp()
        self.api = test_chef_api
        self.api.set_default()

    def random(self, length=8, alphabet='0123456789abcdef'):
        return ''.join(random.choice(alphabet) for _ in xrange(length))
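The new helper is easy to demonstrate standalone. The sketch below is a Python 3 rendering of the same idiom (range instead of xrange) as a free function; the 'pychef-test-' prefix is just an example name. Note also that a runnable variant of the base class would presumably call the factory, test_chef_api(), since set_default() belongs to the ChefAPI instance rather than to the function object assigned in the record.

import random

def random_name(length=8, alphabet='0123456789abcdef'):
    # One independent uniform draw from the alphabet per output character.
    return ''.join(random.choice(alphabet) for _ in range(length))

print('pychef-test-' + random_name())   # e.g. pychef-test-3fa91c0b
print('pychef-test-' + random_name(4))  # shorter suffix, e.g. pychef-test-9c2e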
86a992dc15482087773f1591752a667a6014ba5d
docker/settings/celery.py
docker/settings/celery.py
from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    pass


CeleryDevSettings.load_settings(__name__)
from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    # Since we can't properly set CORS on Azurite container
    # (see https://github.com/Azure/Azurite/issues/55#issuecomment-503380561)
    # trying to fetch ``objects.inv`` from celery container fails because the
    # URL is like http://docs.dev.readthedocs.io/... and it should be
    # http://storage:10000/... This setting fixes that.
    # Once we can use CORS, we should define this setting in the
    # ``docker_compose.py`` file instead.
    AZURE_MEDIA_STORAGE_HOSTNAME = 'storage:10000'


CeleryDevSettings.load_settings(__name__)
Use proper domain for AZURE_MEDIA_STORAGE_HOSTNAME
Use proper domain for AZURE_MEDIA_STORAGE_HOSTNAME

We can't access docs.dev.readthedocs.io from celery container because that domain points to 127.0.0.1 and we don't have the storage in that IP. So, we need to override the AZURE_MEDIA_STORAGE_HOSTNAME in the celery container to point to the storage.

We should do this directly in `docker_compose.py` settings file, but since we can't configure CORS in Azurite we can't do it yet.
Python
mit
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    pass


CeleryDevSettings.load_settings(__name__)

Use proper domain for AZURE_MEDIA_STORAGE_HOSTNAME

We can't access docs.dev.readthedocs.io from celery container because that domain points to 127.0.0.1 and we don't have the storage in that IP. So, we need to override the AZURE_MEDIA_STORAGE_HOSTNAME in the celery container to point to the storage.

We should do this directly in `docker_compose.py` settings file, but since we can't configure CORS in Azurite we can't do it yet.
from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    # Since we can't properly set CORS on Azurite container
    # (see https://github.com/Azure/Azurite/issues/55#issuecomment-503380561)
    # trying to fetch ``objects.inv`` from celery container fails because the
    # URL is like http://docs.dev.readthedocs.io/... and it should be
    # http://storage:10000/... This setting fixes that.
    # Once we can use CORS, we should define this setting in the
    # ``docker_compose.py`` file instead.
    AZURE_MEDIA_STORAGE_HOSTNAME = 'storage:10000'


CeleryDevSettings.load_settings(__name__)
<commit_before>from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    pass


CeleryDevSettings.load_settings(__name__)
<commit_msg>Use proper domain for AZURE_MEDIA_STORAGE_HOSTNAME

We can't access docs.dev.readthedocs.io from celery container because that domain points to 127.0.0.1 and we don't have the storage in that IP. So, we need to override the AZURE_MEDIA_STORAGE_HOSTNAME in the celery container to point to the storage.

We should do this directly in `docker_compose.py` settings file, but since we can't configure CORS in Azurite we can't do it yet.<commit_after>
from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    # Since we can't properly set CORS on Azurite container
    # (see https://github.com/Azure/Azurite/issues/55#issuecomment-503380561)
    # trying to fetch ``objects.inv`` from celery container fails because the
    # URL is like http://docs.dev.readthedocs.io/... and it should be
    # http://storage:10000/... This setting fixes that.
    # Once we can use CORS, we should define this setting in the
    # ``docker_compose.py`` file instead.
    AZURE_MEDIA_STORAGE_HOSTNAME = 'storage:10000'


CeleryDevSettings.load_settings(__name__)
from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    pass


CeleryDevSettings.load_settings(__name__)

Use proper domain for AZURE_MEDIA_STORAGE_HOSTNAME

We can't access docs.dev.readthedocs.io from celery container because that domain points to 127.0.0.1 and we don't have the storage in that IP. So, we need to override the AZURE_MEDIA_STORAGE_HOSTNAME in the celery container to point to the storage.

We should do this directly in `docker_compose.py` settings file, but since we can't configure CORS in Azurite we can't do it yet.

from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    # Since we can't properly set CORS on Azurite container
    # (see https://github.com/Azure/Azurite/issues/55#issuecomment-503380561)
    # trying to fetch ``objects.inv`` from celery container fails because the
    # URL is like http://docs.dev.readthedocs.io/... and it should be
    # http://storage:10000/... This setting fixes that.
    # Once we can use CORS, we should define this setting in the
    # ``docker_compose.py`` file instead.
    AZURE_MEDIA_STORAGE_HOSTNAME = 'storage:10000'


CeleryDevSettings.load_settings(__name__)
<commit_before>from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    pass


CeleryDevSettings.load_settings(__name__)
<commit_msg>Use proper domain for AZURE_MEDIA_STORAGE_HOSTNAME

We can't access docs.dev.readthedocs.io from celery container because that domain points to 127.0.0.1 and we don't have the storage in that IP. So, we need to override the AZURE_MEDIA_STORAGE_HOSTNAME in the celery container to point to the storage.

We should do this directly in `docker_compose.py` settings file, but since we can't configure CORS in Azurite we can't do it yet.<commit_after>from .docker_compose import DockerBaseSettings


class CeleryDevSettings(DockerBaseSettings):
    # Since we can't properly set CORS on Azurite container
    # (see https://github.com/Azure/Azurite/issues/55#issuecomment-503380561)
    # trying to fetch ``objects.inv`` from celery container fails because the
    # URL is like http://docs.dev.readthedocs.io/... and it should be
    # http://storage:10000/... This setting fixes that.
    # Once we can use CORS, we should define this setting in the
    # ``docker_compose.py`` file instead.
    AZURE_MEDIA_STORAGE_HOSTNAME = 'storage:10000'


CeleryDevSettings.load_settings(__name__)
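This record boils down to a general pattern: keep the shared default in a base settings class and override the one unreachable hostname in the environment-specific subclass. A generic, hypothetical sketch follows; the class and setting names are invented stand-ins, not Read the Docs' actual settings machinery.

class BaseSettings:
    # Fine on the host, where this name resolves to 127.0.0.1.
    MEDIA_STORAGE_HOSTNAME = 'docs.dev.example.com'

class WorkerSettings(BaseSettings):
    # Inside the worker container the dev hostname is unreachable,
    # so point straight at the storage service on the compose network.
    MEDIA_STORAGE_HOSTNAME = 'storage:10000'

assert WorkerSettings.MEDIA_STORAGE_HOSTNAME == 'storage:10000'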