commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7f1c05f23533b9b84eb983e78160ff31ce5d4ab6
|
perpendicular-least-squares.py
|
perpendicular-least-squares.py
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
from astropy.io import fits
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
__author__ = 'Jacob Bieker'
import os, sys, random
import numpy
from multiprocessing import Pool
from astropy.io import fits
def random_number(number, seed):
if seed > 0:
seed = -seed
random.seed(a=seed)
for i in range(number):
rand_num = random.randint(0,1)
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
Add Random number with seed
|
Add Random number with seed
|
Python
|
mit
|
jacobbieker/GCP-perpendicular-least-squares,jacobbieker/GCP-perpendicular-least-squares,jacobbieker/GCP-perpendicular-least-squares
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
from astropy.io import fits
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
Add Random number with seed
|
__author__ = 'Jacob Bieker'
import os, sys, random
import numpy
from multiprocessing import Pool
from astropy.io import fits
def random_number(number, seed):
if seed > 0:
seed = -seed
random.seed(a=seed)
for i in range(number):
rand_num = random.randint(0,1)
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
<commit_before>__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
from astropy.io import fits
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
<commit_msg>Add Random number with seed<commit_after>
|
__author__ = 'Jacob Bieker'
import os, sys, random
import numpy
from multiprocessing import Pool
from astropy.io import fits
def random_number(number, seed):
if seed > 0:
seed = -seed
random.seed(a=seed)
for i in range(number):
rand_num = random.randint(0,1)
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
from astropy.io import fits
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
Add Random number with seed__author__ = 'Jacob Bieker'
import os, sys, random
import numpy
from multiprocessing import Pool
from astropy.io import fits
def random_number(number, seed):
if seed > 0:
seed = -seed
random.seed(a=seed)
for i in range(number):
rand_num = random.randint(0,1)
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
<commit_before>__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
from astropy.io import fits
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
<commit_msg>Add Random number with seed<commit_after>__author__ = 'Jacob Bieker'
import os, sys, random
import numpy
from multiprocessing import Pool
from astropy.io import fits
def random_number(number, seed):
if seed > 0:
seed = -seed
random.seed(a=seed)
for i in range(number):
rand_num = random.randint(0,1)
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters(*args):
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type_solution):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
def check_guess(cluster, type_solution, guess):
if type_solution == "line":
return 0
elif type_solution == "plane":
return 0
# TODO: check guess with another cluster
def bootstrap_cluster(cluster):
# TODO: bootstrap a cluster to get a different distribution to check with check_guess
return 0
def determine_uncertainty(solutions):
# TODO: Take a list or dict of solutions and determine the uncertainty in them
return 0
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
756239f128f55481bb26e11fb21a4fe1fc5febb8
|
thumbnails/tests/storage.py
|
thumbnails/tests/storage.py
|
import tempfile
import shutil
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, location=None, *args, **kwargs):
"""
Create the temporary location.
"""
if location is None:
location = tempfile.mkdtemp()
self.temporary_location = location
super(TemporaryStorage, self).__init__(location=location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
|
import os
import shutil
import tempfile
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, *args, **kwargs):
"""
Create the temporary location.
"""
self.temporary_location = os.path.join(tempfile.gettempdir(), 'thumbs_test')
super(TemporaryStorage, self).__init__(location=self.temporary_location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
|
Make TemporaryStorage backend's location less random.
|
Make TemporaryStorage backend's location less random.
|
Python
|
mit
|
ui/django-thumbnails
|
import tempfile
import shutil
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, location=None, *args, **kwargs):
"""
Create the temporary location.
"""
if location is None:
location = tempfile.mkdtemp()
self.temporary_location = location
super(TemporaryStorage, self).__init__(location=location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
Make TemporaryStorage backend's location less random.
|
import os
import shutil
import tempfile
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, *args, **kwargs):
"""
Create the temporary location.
"""
self.temporary_location = os.path.join(tempfile.gettempdir(), 'thumbs_test')
super(TemporaryStorage, self).__init__(location=self.temporary_location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
|
<commit_before>import tempfile
import shutil
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, location=None, *args, **kwargs):
"""
Create the temporary location.
"""
if location is None:
location = tempfile.mkdtemp()
self.temporary_location = location
super(TemporaryStorage, self).__init__(location=location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
<commit_msg>Make TemporaryStorage backend's location less random.<commit_after>
|
import os
import shutil
import tempfile
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, *args, **kwargs):
"""
Create the temporary location.
"""
self.temporary_location = os.path.join(tempfile.gettempdir(), 'thumbs_test')
super(TemporaryStorage, self).__init__(location=self.temporary_location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
|
import tempfile
import shutil
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, location=None, *args, **kwargs):
"""
Create the temporary location.
"""
if location is None:
location = tempfile.mkdtemp()
self.temporary_location = location
super(TemporaryStorage, self).__init__(location=location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
Make TemporaryStorage backend's location less random.import os
import shutil
import tempfile
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, *args, **kwargs):
"""
Create the temporary location.
"""
self.temporary_location = os.path.join(tempfile.gettempdir(), 'thumbs_test')
super(TemporaryStorage, self).__init__(location=self.temporary_location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
|
<commit_before>import tempfile
import shutil
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, location=None, *args, **kwargs):
"""
Create the temporary location.
"""
if location is None:
location = tempfile.mkdtemp()
self.temporary_location = location
super(TemporaryStorage, self).__init__(location=location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
<commit_msg>Make TemporaryStorage backend's location less random.<commit_after>import os
import shutil
import tempfile
from django.core.files.storage import FileSystemStorage
"""
Temporary Storage class for test. Copied from Smiley Chris' Easy Thumbnails test package
https://github.com/SmileyChris/easy-thumbnails/blob/master/easy_thumbnails/test.py
"""
class TemporaryStorage(FileSystemStorage):
"""
A storage class useful for tests that uses a temporary location to store
all files and provides a method to remove this location when it is finished
with.
"""
def __init__(self, *args, **kwargs):
"""
Create the temporary location.
"""
self.temporary_location = os.path.join(tempfile.gettempdir(), 'thumbs_test')
super(TemporaryStorage, self).__init__(location=self.temporary_location, *args,
**kwargs)
def delete_temporary_storage(self):
"""
Delete the temporary directory created during initialisation.
This storage class should not be used again after this method is
called.
"""
temporary_location = getattr(self, 'temporary_location', None)
if temporary_location:
shutil.rmtree(temporary_location)
|
234b3f157295baedca91895d2a2cb9e6f8355e2e
|
pyim/tools/annotate/main.py
|
pyim/tools/annotate/main.py
|
import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='sub-command help')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
|
import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='Annotator to use.')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
|
Add help description for annotator.
|
Add help description for annotator.
|
Python
|
mit
|
jrderuiter/pyim,jrderuiter/pyim
|
import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='sub-command help')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
Add help description for annotator.
|
import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='Annotator to use.')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
|
<commit_before>import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='sub-command help')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
<commit_msg>Add help description for annotator.<commit_after>
|
import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='Annotator to use.')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
|
import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='sub-command help')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
Add help description for annotator.import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='Annotator to use.')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
|
<commit_before>import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='sub-command help')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
<commit_msg>Add help description for annotator.<commit_after>import sys
import argparse
import pandas as pd
from pyim.tools.annotate.rbm import RbmAnnotator
ANNOTATORS = {
'rbm': RbmAnnotator
}
def main():
# Setup main argument parser and annotator specific sub-parsers.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='Annotator to use.')
for name, class_ in ANNOTATORS.items():
class_.register_parser(subparsers, name)
args = parser.parse_args()
if 'annotator' not in args:
parser.print_help()
sys.exit(2)
else:
# Extract input/output parameters.
options = vars(args)
input_path = options.pop('input')
output_path = options.pop('output')
# Construct annotator.
class_ = options.pop('annotator')
annotator = class_(**options)
# Load input file.
frame = pd.read_csv(input_path, sep='\t',
dtype={'seqname': str, 'location': int, 'strand': int})
# Do annotation and write outputs!
result = annotator.annotate(frame)
result.to_csv(output_path, sep='\t', index=False)
if __name__ == '__main__':
main()
|
9b0571623a0017f96f9945fe263cd302faa11c2e
|
sparkback/__init__.py
|
sparkback/__init__.py
|
# -*- coding: utf-8 -*-
import sys
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(d):
data_range = max(d) - min(d)
divider = data_range / (len(ticks) - 1)
min_value = min(d)
scaled = [int(abs(round((i - min_value) / divider))) for i in d]
return scaled
def print_ansi_spark(d):
for i in d:
sys.stdout.write(ticks[i])
print ''
if __name__ == "__main__":
print 'hello world'
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
print m,n
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
args = parser.parse_args()
print_ansi_spark(scale_data(args.integers))
|
Make division float to fix divide by zero issues
|
Make division float to fix divide by zero issues
|
Python
|
mit
|
mmichie/sparkback
|
# -*- coding: utf-8 -*-
import sys
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(d):
data_range = max(d) - min(d)
divider = data_range / (len(ticks) - 1)
min_value = min(d)
scaled = [int(abs(round((i - min_value) / divider))) for i in d]
return scaled
def print_ansi_spark(d):
for i in d:
sys.stdout.write(ticks[i])
print ''
if __name__ == "__main__":
print 'hello world'
Make division float to fix divide by zero issues
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
print m,n
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
args = parser.parse_args()
print_ansi_spark(scale_data(args.integers))
|
<commit_before>
# -*- coding: utf-8 -*-
import sys
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(d):
data_range = max(d) - min(d)
divider = data_range / (len(ticks) - 1)
min_value = min(d)
scaled = [int(abs(round((i - min_value) / divider))) for i in d]
return scaled
def print_ansi_spark(d):
for i in d:
sys.stdout.write(ticks[i])
print ''
if __name__ == "__main__":
print 'hello world'
<commit_msg>Make division float to fix divide by zero issues<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
print m,n
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
args = parser.parse_args()
print_ansi_spark(scale_data(args.integers))
|
# -*- coding: utf-8 -*-
import sys
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(d):
data_range = max(d) - min(d)
divider = data_range / (len(ticks) - 1)
min_value = min(d)
scaled = [int(abs(round((i - min_value) / divider))) for i in d]
return scaled
def print_ansi_spark(d):
for i in d:
sys.stdout.write(ticks[i])
print ''
if __name__ == "__main__":
print 'hello world'
Make division float to fix divide by zero issues# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
print m,n
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
args = parser.parse_args()
print_ansi_spark(scale_data(args.integers))
|
<commit_before>
# -*- coding: utf-8 -*-
import sys
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(d):
data_range = max(d) - min(d)
divider = data_range / (len(ticks) - 1)
min_value = min(d)
scaled = [int(abs(round((i - min_value) / divider))) for i in d]
return scaled
def print_ansi_spark(d):
for i in d:
sys.stdout.write(ticks[i])
print ''
if __name__ == "__main__":
print 'hello world'
<commit_msg>Make division float to fix divide by zero issues<commit_after># -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
print m,n
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
args = parser.parse_args()
print_ansi_spark(scale_data(args.integers))
|
8d8dd559252bc32388e224746f2ae8cdbdceaae4
|
masters/master.client.syzygy/master_win_official_cfg.py
|
masters/master.client.syzygy/master_win_official_cfg.py
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
from buildbot.changes.filter import ChangeFilter
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
# This is due to buildbot 0.7.12 being used for the presubmit check.
from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
|
Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
|
Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
TBR=nsylvain@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/7631036
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@97254 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
from buildbot.changes.filter import ChangeFilter
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
TBR=nsylvain@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/7631036
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@97254 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
# This is due to buildbot 0.7.12 being used for the presubmit check.
from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
|
<commit_before># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
from buildbot.changes.filter import ChangeFilter
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
<commit_msg>Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
TBR=nsylvain@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/7631036
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@97254 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
# This is due to buildbot 0.7.12 being used for the presubmit check.
from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
from buildbot.changes.filter import ChangeFilter
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
TBR=nsylvain@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/7631036
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@97254 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
# This is due to buildbot 0.7.12 being used for the presubmit check.
from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
|
<commit_before># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
from buildbot.changes.filter import ChangeFilter
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
<commit_msg>Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
TBR=nsylvain@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/7631036
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@97254 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.scheduler import Scheduler
# This is due to buildbot 0.7.12 being used for the presubmit check.
from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401
from master.factory import syzygy_factory
def win():
return syzygy_factory.SyzygyFactory('src/syzygy',
target_platform='win32')
def _VersionFileFilter(change):
"""A change filter function that disregards all changes that don't
touch src/syzygy/VERSION.
Args:
change: a buildbot Change object.
"""
return change.branch == 'trunk' and 'syzygy/VERSION' in change.files
#
# Official build scheduler for Syzygy
#
official_scheduler = Scheduler('syzygy_version',
treeStableTimer=0,
change_filter=ChangeFilter(
filter_fn=_VersionFileFilter),
builderNames=['Syzygy Official'])
#
# Windows official Release builder
#
official_factory = win().SyzygyFactory(official_release=True)
official_builder = {
'name': 'Syzygy Official',
'factory': official_factory,
'schedulers': 'syzygy_version',
'auto_reboot': False,
'category': 'official',
}
def Update(config, active_master, c):
c['schedulers'].append(official_scheduler)
c['builders'].append(official_builder)
|
b14d893c68a6c1117f01b7d5712dacd8d5ca8cf9
|
prolog/builtin/sourcehelper.py
|
prolog/builtin/sourcehelper.py
|
import os
import sys
import py
from prolog.interpreter.error import throw_existence_error
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", filename)
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
|
import os
import sys
from prolog.interpreter.error import throw_existence_error
from prolog.interpreter.term import Callable
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", Callable.build(filename))
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
|
Make atom from filename before throwing an error in get_source.
|
Make atom from filename before throwing an error in get_source.
|
Python
|
mit
|
cosmoharrigan/pyrolog
|
import os
import sys
import py
from prolog.interpreter.error import throw_existence_error
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", filename)
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
Make atom from filename before throwing an error in get_source.
|
import os
import sys
from prolog.interpreter.error import throw_existence_error
from prolog.interpreter.term import Callable
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", Callable.build(filename))
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
|
<commit_before>import os
import sys
import py
from prolog.interpreter.error import throw_existence_error
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", filename)
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
<commit_msg>Make atom from filename before throwing an error in get_source.<commit_after>
|
import os
import sys
from prolog.interpreter.error import throw_existence_error
from prolog.interpreter.term import Callable
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", Callable.build(filename))
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
|
import os
import sys
import py
from prolog.interpreter.error import throw_existence_error
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", filename)
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
Make atom from filename before throwing an error in get_source.import os
import sys
from prolog.interpreter.error import throw_existence_error
from prolog.interpreter.term import Callable
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", Callable.build(filename))
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
|
<commit_before>import os
import sys
import py
from prolog.interpreter.error import throw_existence_error
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", filename)
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
<commit_msg>Make atom from filename before throwing an error in get_source.<commit_after>import os
import sys
from prolog.interpreter.error import throw_existence_error
from prolog.interpreter.term import Callable
path = os.path.dirname(__file__)
path = os.path.join(path, "..", "prolog_modules")
def get_source(filename):
try:
fd = os.open(filename, os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename), os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(filename + ".pl", os.O_RDONLY, 0777)
except OSError, e:
try:
fd = os.open(os.path.join(path, filename + ".pl"), os.O_RDONLY, 0777)
except OSError, e:
throw_existence_error("source_sink", Callable.build(filename))
assert 0, "unreachable" # make the flow space happy
try:
content = []
while 1:
s = os.read(fd, 4096)
if not s:
break
content.append(s)
file_content = "".join(content)
finally:
os.close(fd)
return file_content
|
0dabc858976197459cfe71fe1a4a8a85c181db75
|
django_localflavor_ie/ie_counties.py
|
django_localflavor_ie/ie_counties.py
|
"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('antrim', _('Antrim')),
('armagh', _('Armagh')),
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('derry', _('Derry')),
('donegal', _('Donegal')),
('down', _('Down')),
('dublin', _('Dublin')),
('fermanagh', _('Fermanagh')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('tyrone', _('Tyrone')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
|
"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('donegal', _('Donegal')),
('dublin', _('Dublin')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
|
Remove Northern Irish counties. These are part of the UK, not Ireland
|
Remove Northern Irish counties. These are part of the UK, not Ireland
|
Python
|
bsd-3-clause
|
martinogden/django-localflavor-ie
|
"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('antrim', _('Antrim')),
('armagh', _('Armagh')),
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('derry', _('Derry')),
('donegal', _('Donegal')),
('down', _('Down')),
('dublin', _('Dublin')),
('fermanagh', _('Fermanagh')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('tyrone', _('Tyrone')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
Remove Northern Irish counties. These are part of the UK, not Ireland
|
"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('donegal', _('Donegal')),
('dublin', _('Dublin')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
|
<commit_before>"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('antrim', _('Antrim')),
('armagh', _('Armagh')),
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('derry', _('Derry')),
('donegal', _('Donegal')),
('down', _('Down')),
('dublin', _('Dublin')),
('fermanagh', _('Fermanagh')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('tyrone', _('Tyrone')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
<commit_msg>Remove Northern Irish counties. These are part of the UK, not Ireland<commit_after>
|
"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('donegal', _('Donegal')),
('dublin', _('Dublin')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
|
"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('antrim', _('Antrim')),
('armagh', _('Armagh')),
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('derry', _('Derry')),
('donegal', _('Donegal')),
('down', _('Down')),
('dublin', _('Dublin')),
('fermanagh', _('Fermanagh')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('tyrone', _('Tyrone')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
Remove Northern Irish counties. These are part of the UK, not Ireland"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('donegal', _('Donegal')),
('dublin', _('Dublin')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
|
<commit_before>"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('antrim', _('Antrim')),
('armagh', _('Armagh')),
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('derry', _('Derry')),
('donegal', _('Donegal')),
('down', _('Down')),
('dublin', _('Dublin')),
('fermanagh', _('Fermanagh')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('tyrone', _('Tyrone')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
<commit_msg>Remove Northern Irish counties. These are part of the UK, not Ireland<commit_after>"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('donegal', _('Donegal')),
('dublin', _('Dublin')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
|
6d6ee78d49663150f3d58855b4ea49ca3fbee62f
|
changes/api/project_build_index.py
|
changes/api/project_build_index.py
|
from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload(Build.project, innerjoin=True),
joinedload(Build.author),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
|
Add source to project build index query
|
Add source to project build index query
|
Python
|
apache-2.0
|
bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes
|
from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload(Build.project, innerjoin=True),
joinedload(Build.author),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
Add source to project build index query
|
from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload(Build.project, innerjoin=True),
joinedload(Build.author),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
<commit_msg>Add source to project build index query<commit_after>
|
from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload(Build.project, innerjoin=True),
joinedload(Build.author),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
Add source to project build index queryfrom __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload(Build.project, innerjoin=True),
joinedload(Build.author),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
<commit_msg>Add source to project build index query<commit_after>from __future__ import absolute_import, division, unicode_literals
from flask import Response, request
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectBuildIndexAPIView(APIView):
def _get_project(self, project_id):
project = Project.query.options(
joinedload(Project.repository, innerjoin=True),
).filter_by(slug=project_id).first()
if project is None:
project = Project.query.options(
joinedload(Project.repository),
).get(project_id)
return project
def get(self, project_id):
project = self._get_project(project_id)
if not project:
return '', 404
include_patches = request.args.get('include_patches') or '1'
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.project_id == project.id,
).order_by(Build.date_created.desc())
if include_patches == '0':
queryset = queryset.filter(
Build.patch == None, # NOQA
)
return self.paginate(queryset)
def get_stream_channels(self, project_id=None):
project = self._get_project(project_id)
if not project:
return Response(status=404)
return ['projects:{0}:builds'.format(project.id.hex)]
|
231a40a8a8c7d7844475a381638c96ebaf3b288a
|
osOps.py
|
osOps.py
|
import os
def createFile(directoryPath, fileName):
return None
def createDirectory(directoryPath):
return None
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
|
import os
def createDirectory(directoryPath):
return None
def createFile(filePath):
try:
createdFile = open(filePath, 'w+')
createdFile.close()
except IOError:
print "Error: could not create file at location: " + filePath
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
|
Implement logic for file creation
|
Implement logic for file creation
|
Python
|
apache-2.0
|
AmosGarner/PyInventory
|
import os
def createFile(directoryPath, fileName):
return None
def createDirectory(directoryPath):
return None
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
Implement logic for file creation
|
import os
def createDirectory(directoryPath):
return None
def createFile(filePath):
try:
createdFile = open(filePath, 'w+')
createdFile.close()
except IOError:
print "Error: could not create file at location: " + filePath
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
|
<commit_before>import os
def createFile(directoryPath, fileName):
return None
def createDirectory(directoryPath):
return None
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
<commit_msg>Implement logic for file creation<commit_after>
|
import os
def createDirectory(directoryPath):
return None
def createFile(filePath):
try:
createdFile = open(filePath, 'w+')
createdFile.close()
except IOError:
print "Error: could not create file at location: " + filePath
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
|
import os
def createFile(directoryPath, fileName):
return None
def createDirectory(directoryPath):
return None
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
Implement logic for file creationimport os
def createDirectory(directoryPath):
return None
def createFile(filePath):
try:
createdFile = open(filePath, 'w+')
createdFile.close()
except IOError:
print "Error: could not create file at location: " + filePath
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
|
<commit_before>import os
def createFile(directoryPath, fileName):
return None
def createDirectory(directoryPath):
return None
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
<commit_msg>Implement logic for file creation<commit_after>import os
def createDirectory(directoryPath):
return None
def createFile(filePath):
try:
createdFile = open(filePath, 'w+')
createdFile.close()
except IOError:
print "Error: could not create file at location: " + filePath
def getFileContents(filePath):
return None
def deleteFile(filePath):
return None
def deleteDirectory(directoryPath):
return None
|
560bbc0a0415b536fd6a49bbce6b2beb3f5f7219
|
src/balistos/tests/test_views.py
|
src/balistos/tests/test_views.py
|
# -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertEqual(result['name'], 'balistos')
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'balistos!', res.body)
|
# -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home_no_user(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertIsNone(result['username'])
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'Join playlist', res.body)
|
Fix tests to comply with new home view
|
Fix tests to comply with new home view
|
Python
|
mit
|
ferewuz/balistos,ferewuz/balistos
|
# -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertEqual(result['name'], 'balistos')
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'balistos!', res.body)
Fix tests to comply with new home view
|
# -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home_no_user(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertIsNone(result['username'])
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'Join playlist', res.body)
|
<commit_before># -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertEqual(result['name'], 'balistos')
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'balistos!', res.body)
<commit_msg>Fix tests to comply with new home view<commit_after>
|
# -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home_no_user(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertIsNone(result['username'])
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'Join playlist', res.body)
|
# -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertEqual(result['name'], 'balistos')
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'balistos!', res.body)
Fix tests to comply with new home view# -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home_no_user(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertIsNone(result['username'])
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'Join playlist', res.body)
|
<commit_before># -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertEqual(result['name'], 'balistos')
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'balistos!', res.body)
<commit_msg>Fix tests to comply with new home view<commit_after># -*- coding: utf-8 -*-
"""Tests."""
from pyramid import testing
from balistos.testing import createTestDB
from pyramid_basemodel import Session
import unittest
class TestHome(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
def test_home_no_user(self):
from balistos.views.main import home
request = testing.DummyRequest()
result = home(request)
self.assertIsNone(result['username'])
class TestHomeFunctional(unittest.TestCase):
def setUp(self):
from balistos import configure
createTestDB()
self.config = testing.setUp()
configure(self.config)
app = self.config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
Session.remove()
testing.tearDown()
def test_home(self):
res = self.testapp.get('/home', status=200)
self.assertIn(u'Join playlist', res.body)
|
f8a7939bab7803a04e28f01852b1323fe9651a31
|
zaqar_ui/version.py
|
zaqar_ui/version.py
|
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
|
Add Apache 2.0 license to source file
|
Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I714355371a6c57f74924efec19f12d48c7fe2d3f
|
Python
|
apache-2.0
|
openstack/zaqar-ui,openstack/zaqar-ui,openstack/zaqar-ui,openstack/zaqar-ui
|
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I714355371a6c57f74924efec19f12d48c7fe2d3f
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
|
<commit_before>import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
<commit_msg>Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I714355371a6c57f74924efec19f12d48c7fe2d3f<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
|
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I714355371a6c57f74924efec19f12d48c7fe2d3f# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
|
<commit_before>import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
<commit_msg>Add Apache 2.0 license to source file
As per OpenStack licensing guide lines [1]:
[H102 H103] Newly contributed Source Code should be licensed under
the Apache 2.0 license.
[1] http://docs.openstack.org/developer/hacking/#openstack-licensing
Change-Id: I714355371a6c57f74924efec19f12d48c7fe2d3f<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('zaqar-ui')
|
b46370e025efc4730fb39c05928ff22744956eda
|
django_perf_rec/functional.py
|
django_perf_rec/functional.py
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
signature = inspect.getargspec(func)
if signature.args[:1] in (['self'], ['cls']):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
if hasattr(inspect, 'signature'):
# Python 3
signature = inspect.signature(func)
first_arg_name = list(signature.parameters.keys())[0]
else:
# Python 2
signature = inspect.getargspec(func)
first_arg_name = signature.args[0]
print(first_arg_name)
if first_arg_name in ('self', 'cls'):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
|
Fix warnings on Python 3
|
Fix warnings on Python 3
Fixes #74. Use `inspect.signature()` on Python 3 instead of the deprecated `inspect.getargspec()`.
|
Python
|
mit
|
YPlan/django-perf-rec
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
signature = inspect.getargspec(func)
if signature.args[:1] in (['self'], ['cls']):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
Fix warnings on Python 3
Fixes #74. Use `inspect.signature()` on Python 3 instead of the deprecated `inspect.getargspec()`.
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
if hasattr(inspect, 'signature'):
# Python 3
signature = inspect.signature(func)
first_arg_name = list(signature.parameters.keys())[0]
else:
# Python 2
signature = inspect.getargspec(func)
first_arg_name = signature.args[0]
print(first_arg_name)
if first_arg_name in ('self', 'cls'):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
|
<commit_before># -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
signature = inspect.getargspec(func)
if signature.args[:1] in (['self'], ['cls']):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
<commit_msg>Fix warnings on Python 3
Fixes #74. Use `inspect.signature()` on Python 3 instead of the deprecated `inspect.getargspec()`.<commit_after>
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
if hasattr(inspect, 'signature'):
# Python 3
signature = inspect.signature(func)
first_arg_name = list(signature.parameters.keys())[0]
else:
# Python 2
signature = inspect.getargspec(func)
first_arg_name = signature.args[0]
print(first_arg_name)
if first_arg_name in ('self', 'cls'):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
signature = inspect.getargspec(func)
if signature.args[:1] in (['self'], ['cls']):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
Fix warnings on Python 3
Fixes #74. Use `inspect.signature()` on Python 3 instead of the deprecated `inspect.getargspec()`.# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
if hasattr(inspect, 'signature'):
# Python 3
signature = inspect.signature(func)
first_arg_name = list(signature.parameters.keys())[0]
else:
# Python 2
signature = inspect.getargspec(func)
first_arg_name = signature.args[0]
print(first_arg_name)
if first_arg_name in ('self', 'cls'):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
|
<commit_before># -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
signature = inspect.getargspec(func)
if signature.args[:1] in (['self'], ['cls']):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
<commit_msg>Fix warnings on Python 3
Fixes #74. Use `inspect.signature()` on Python 3 instead of the deprecated `inspect.getargspec()`.<commit_after># -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
from functools import wraps
def kwargs_only(func):
"""
Make a function only accept keyword arguments.
This can be dropped in Python 3 in lieu of:
def foo(*, bar=default):
"""
if hasattr(inspect, 'signature'):
# Python 3
signature = inspect.signature(func)
first_arg_name = list(signature.parameters.keys())[0]
else:
# Python 2
signature = inspect.getargspec(func)
first_arg_name = signature.args[0]
print(first_arg_name)
if first_arg_name in ('self', 'cls'):
allowable_args = 1
else:
allowable_args = 0
@wraps(func)
def wrapper(*args, **kwargs):
if len(args) > allowable_args:
raise TypeError("{} should only be called with keyword args".format(func.__name__))
return func(*args, **kwargs)
return wrapper
|
ae5e35aefd5b508fa2a0d1ed7d0ceefd9d24eb27
|
17B-162/spw_setup.py
|
17B-162/spw_setup.py
|
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz"],
1: ["H166alp", "1.42473GHz"],
2: ["H164alp", "1.47734GHz"],
3: ["OH1612", "1.612231GHz"],
4: ["H158alp", "1.65154GHz"],
5: ["OH1665", "1.6654018GHz"],
6: ["OH1667", "1.667359GHz"],
7: ["OH1720", "1.72053GHz"],
8: ["H153alp", "1.81825GHz"],
9: ["H152alp", "1.85425GHz"]}
|
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz", 4096],
1: ["H166alp", "1.42473GHz", 128],
2: ["H164alp", "1.47734GHz", 128],
3: ["OH1612", "1.612231GHz", 256],
4: ["H158alp", "1.65154GHz", 128],
5: ["OH1665", "1.6654018GHz", 256],
6: ["OH1667", "1.667359GHz", 256],
7: ["OH1720", "1.72053GHz", 256],
8: ["H153alp", "1.81825GHz", 128],
9: ["H152alp", "1.85425GHz", 128]}
|
Add the number of channels in each SPW
|
Add the number of channels in each SPW
|
Python
|
mit
|
e-koch/VLA_Lband,e-koch/VLA_Lband
|
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz"],
1: ["H166alp", "1.42473GHz"],
2: ["H164alp", "1.47734GHz"],
3: ["OH1612", "1.612231GHz"],
4: ["H158alp", "1.65154GHz"],
5: ["OH1665", "1.6654018GHz"],
6: ["OH1667", "1.667359GHz"],
7: ["OH1720", "1.72053GHz"],
8: ["H153alp", "1.81825GHz"],
9: ["H152alp", "1.85425GHz"]}
Add the number of channels in each SPW
|
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz", 4096],
1: ["H166alp", "1.42473GHz", 128],
2: ["H164alp", "1.47734GHz", 128],
3: ["OH1612", "1.612231GHz", 256],
4: ["H158alp", "1.65154GHz", 128],
5: ["OH1665", "1.6654018GHz", 256],
6: ["OH1667", "1.667359GHz", 256],
7: ["OH1720", "1.72053GHz", 256],
8: ["H153alp", "1.81825GHz", 128],
9: ["H152alp", "1.85425GHz", 128]}
|
<commit_before>
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz"],
1: ["H166alp", "1.42473GHz"],
2: ["H164alp", "1.47734GHz"],
3: ["OH1612", "1.612231GHz"],
4: ["H158alp", "1.65154GHz"],
5: ["OH1665", "1.6654018GHz"],
6: ["OH1667", "1.667359GHz"],
7: ["OH1720", "1.72053GHz"],
8: ["H153alp", "1.81825GHz"],
9: ["H152alp", "1.85425GHz"]}
<commit_msg>Add the number of channels in each SPW<commit_after>
|
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz", 4096],
1: ["H166alp", "1.42473GHz", 128],
2: ["H164alp", "1.47734GHz", 128],
3: ["OH1612", "1.612231GHz", 256],
4: ["H158alp", "1.65154GHz", 128],
5: ["OH1665", "1.6654018GHz", 256],
6: ["OH1667", "1.667359GHz", 256],
7: ["OH1720", "1.72053GHz", 256],
8: ["H153alp", "1.81825GHz", 128],
9: ["H152alp", "1.85425GHz", 128]}
|
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz"],
1: ["H166alp", "1.42473GHz"],
2: ["H164alp", "1.47734GHz"],
3: ["OH1612", "1.612231GHz"],
4: ["H158alp", "1.65154GHz"],
5: ["OH1665", "1.6654018GHz"],
6: ["OH1667", "1.667359GHz"],
7: ["OH1720", "1.72053GHz"],
8: ["H153alp", "1.81825GHz"],
9: ["H152alp", "1.85425GHz"]}
Add the number of channels in each SPW
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz", 4096],
1: ["H166alp", "1.42473GHz", 128],
2: ["H164alp", "1.47734GHz", 128],
3: ["OH1612", "1.612231GHz", 256],
4: ["H158alp", "1.65154GHz", 128],
5: ["OH1665", "1.6654018GHz", 256],
6: ["OH1667", "1.667359GHz", 256],
7: ["OH1720", "1.72053GHz", 256],
8: ["H153alp", "1.81825GHz", 128],
9: ["H152alp", "1.85425GHz", 128]}
|
<commit_before>
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz"],
1: ["H166alp", "1.42473GHz"],
2: ["H164alp", "1.47734GHz"],
3: ["OH1612", "1.612231GHz"],
4: ["H158alp", "1.65154GHz"],
5: ["OH1665", "1.6654018GHz"],
6: ["OH1667", "1.667359GHz"],
7: ["OH1720", "1.72053GHz"],
8: ["H153alp", "1.81825GHz"],
9: ["H152alp", "1.85425GHz"]}
<commit_msg>Add the number of channels in each SPW<commit_after>
# Line SPW setup for 17B-162 w/ rest frequencies
linespw_dict = {0: ["HI", "1.420405752GHz", 4096],
1: ["H166alp", "1.42473GHz", 128],
2: ["H164alp", "1.47734GHz", 128],
3: ["OH1612", "1.612231GHz", 256],
4: ["H158alp", "1.65154GHz", 128],
5: ["OH1665", "1.6654018GHz", 256],
6: ["OH1667", "1.667359GHz", 256],
7: ["OH1720", "1.72053GHz", 256],
8: ["H153alp", "1.81825GHz", 128],
9: ["H152alp", "1.85425GHz", 128]}
|
3cc82eeb8400d182461467b4e2d8ec3c7fc487cb
|
config.py
|
config.py
|
import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read("secret_config.cfg")
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
|
import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read(os.path.join(basedir, "secret_config.cfg"))
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
|
Fix path read error for secret_cfg
|
Fix path read error for secret_cfg
|
Python
|
bsd-2-clause
|
NikhilKalige/atom-website,NikhilKalige/atom-website,NikhilKalige/atom-website
|
import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read("secret_config.cfg")
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
Fix path read error for secret_cfg
|
import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read(os.path.join(basedir, "secret_config.cfg"))
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
|
<commit_before>import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read("secret_config.cfg")
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
<commit_msg>Fix path read error for secret_cfg<commit_after>
|
import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read(os.path.join(basedir, "secret_config.cfg"))
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
|
import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read("secret_config.cfg")
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
Fix path read error for secret_cfgimport os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read(os.path.join(basedir, "secret_config.cfg"))
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
|
<commit_before>import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read("secret_config.cfg")
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
<commit_msg>Fix path read error for secret_cfg<commit_after>import os
import ConfigParser
os.environ['PYTHONINSPECT'] = 'True'
basedir = os.path.abspath(os.path.dirname(__file__))
parser = ConfigParser.ConfigParser()
parser.read(os.path.join(basedir, "secret_config.cfg"))
class Default:
PORT = 8080
API_KEY = parser.get("github", "api_key")
CACHE_TYPE = 'memcached'
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("develop", "db_password").strip('"') + \
'@localhost/atom-website-develop'
#SQLALCHEMY_DATABASE_URI = 'mysql://root:qaswqasw@localhost/atom-website'
class Developement(Default):
DEBUG = True
class Testing(Default):
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("test", "db_password").strip('"') + '@localhost/atom-website-test'
class Production(Default):
SQLALCHEMY_DATABASE_URI = 'mysql://root:' + \
parser.get("production", "db_password").strip('"') + \
'@localhost/atom'
config = {
'DEFAULT': Default,
'DEVELOPMENT': Developement,
'TESTING': Testing,
'PRODUCTION': Production
}
|
1970a6ddbd3b1a891b0c420498f51ad186a4ba7b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10-pre3',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10_pre4',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
Make 0.9.10_pre4 to match the webapp
|
Make 0.9.10_pre4 to match the webapp
|
Python
|
apache-2.0
|
kerlandsson/whisper,cbowman0/whisper,jjneely/whisper,obfuscurity/whisper,penpen/whisper,alexandreboisvert/whisper,deniszh/whisper,graphite-server/whisper,akbooer/whisper,graphite-project/whisper,acdha/whisper,piotr1212/whisper
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10-pre3',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
Make 0.9.10_pre4 to match the webapp
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10_pre4',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
<commit_before>#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10-pre3',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
<commit_msg>Make 0.9.10_pre4 to match the webapp<commit_after>
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10_pre4',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10-pre3',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
Make 0.9.10_pre4 to match the webapp#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10_pre4',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
<commit_before>#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10-pre3',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
<commit_msg>Make 0.9.10_pre4 to match the webapp<commit_after>#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10_pre4',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
b42896e796e6f4d2984547a34978bb34c66ba749
|
blanc_basic_news/news/views.py
|
blanc_basic_news/news/views.py
|
from django.views.generic import ListView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.conf import settings
from .models import Category, Post
class PostListView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
def get_queryset(self):
return Post.objects.select_related().filter(
published=True, date__lte=timezone.now())
class PostListCategoryView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list_category'
def get_queryset(self):
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return Post.objects.select_related().filter(
published=True,
date__lte=timezone.now(),
category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
|
from django.views.generic import ArchiveIndexView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from .models import Category, Post
class PostListView(ArchiveIndexView):
queryset = Post.objects.select_related().filter(published=True)
date_field = 'date'
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list'
context_object_name = 'object_list'
class PostListCategoryView(PostListView):
template_name_suffix = '_list_category'
def get_queryset(self):
qs = super(PostListCategoryView, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return qs.filter(category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
|
Use ArchiveIndexView to reduce code
|
Use ArchiveIndexView to reduce code
|
Python
|
bsd-3-clause
|
blancltd/blanc-basic-news
|
from django.views.generic import ListView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.conf import settings
from .models import Category, Post
class PostListView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
def get_queryset(self):
return Post.objects.select_related().filter(
published=True, date__lte=timezone.now())
class PostListCategoryView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list_category'
def get_queryset(self):
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return Post.objects.select_related().filter(
published=True,
date__lte=timezone.now(),
category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
Use ArchiveIndexView to reduce code
|
from django.views.generic import ArchiveIndexView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from .models import Category, Post
class PostListView(ArchiveIndexView):
queryset = Post.objects.select_related().filter(published=True)
date_field = 'date'
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list'
context_object_name = 'object_list'
class PostListCategoryView(PostListView):
template_name_suffix = '_list_category'
def get_queryset(self):
qs = super(PostListCategoryView, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return qs.filter(category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
|
<commit_before>from django.views.generic import ListView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.conf import settings
from .models import Category, Post
class PostListView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
def get_queryset(self):
return Post.objects.select_related().filter(
published=True, date__lte=timezone.now())
class PostListCategoryView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list_category'
def get_queryset(self):
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return Post.objects.select_related().filter(
published=True,
date__lte=timezone.now(),
category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
<commit_msg>Use ArchiveIndexView to reduce code<commit_after>
|
from django.views.generic import ArchiveIndexView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from .models import Category, Post
class PostListView(ArchiveIndexView):
queryset = Post.objects.select_related().filter(published=True)
date_field = 'date'
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list'
context_object_name = 'object_list'
class PostListCategoryView(PostListView):
template_name_suffix = '_list_category'
def get_queryset(self):
qs = super(PostListCategoryView, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return qs.filter(category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
|
from django.views.generic import ListView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.conf import settings
from .models import Category, Post
class PostListView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
def get_queryset(self):
return Post.objects.select_related().filter(
published=True, date__lte=timezone.now())
class PostListCategoryView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list_category'
def get_queryset(self):
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return Post.objects.select_related().filter(
published=True,
date__lte=timezone.now(),
category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
Use ArchiveIndexView to reduce codefrom django.views.generic import ArchiveIndexView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from .models import Category, Post
class PostListView(ArchiveIndexView):
queryset = Post.objects.select_related().filter(published=True)
date_field = 'date'
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list'
context_object_name = 'object_list'
class PostListCategoryView(PostListView):
template_name_suffix = '_list_category'
def get_queryset(self):
qs = super(PostListCategoryView, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return qs.filter(category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
|
<commit_before>from django.views.generic import ListView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.conf import settings
from .models import Category, Post
class PostListView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
def get_queryset(self):
return Post.objects.select_related().filter(
published=True, date__lte=timezone.now())
class PostListCategoryView(ListView):
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list_category'
def get_queryset(self):
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return Post.objects.select_related().filter(
published=True,
date__lte=timezone.now(),
category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
<commit_msg>Use ArchiveIndexView to reduce code<commit_after>from django.views.generic import ArchiveIndexView, MonthArchiveView, DateDetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from .models import Category, Post
class PostListView(ArchiveIndexView):
queryset = Post.objects.select_related().filter(published=True)
date_field = 'date'
paginate_by = getattr(settings, 'NEWS_PER_PAGE', 10)
template_name_suffix = '_list'
context_object_name = 'object_list'
class PostListCategoryView(PostListView):
template_name_suffix = '_list_category'
def get_queryset(self):
qs = super(PostListCategoryView, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs['slug'])
return qs.filter(category=self.category)
def get_context_data(self, **kwargs):
context = super(PostListCategoryView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class PostListMonthView(MonthArchiveView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
class PostDetailView(DateDetailView):
queryset = Post.objects.filter(published=True)
month_format = '%m'
date_field = 'date_url'
|
306ff2e0bff0b6bc0babec90a512c8a2919168a1
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='humod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='pyhumod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
|
Change the name back to pyhumod
|
Change the name back to pyhumod
With the change of name from pyhumod to humod this would be a separate pypi package and we don't want that.
|
Python
|
bsd-3-clause
|
oozie/pyhumod
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='humod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
Change the name back to pyhumod
With the change of name from pyhumod to humod this would be a separate pypi package and we don't want that.
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='pyhumod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
|
<commit_before># -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='humod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
<commit_msg>Change the name back to pyhumod
With the change of name from pyhumod to humod this would be a separate pypi package and we don't want that.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='pyhumod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='humod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
Change the name back to pyhumod
With the change of name from pyhumod to humod this would be a separate pypi package and we don't want that.# -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='pyhumod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
|
<commit_before># -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='humod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
<commit_msg>Change the name back to pyhumod
With the change of name from pyhumod to humod this would be a separate pypi package and we don't want that.<commit_after># -*- coding: utf-8 -*-
import os
from distutils.core import setup
from humod import __version__
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod'])]
try:
os.stat('/etc/ppp/options')
except OSError:
CONFIG_FILES = [('/etc/ppp/peers', ['conf/humod']),
('/etc/ppp/options', ['conf/options'])]
setup(name='pyhumod',
version=__version__,
packages=['humod'],
description='Access SMS, GSM and 3G features of Huawei and '
'compatible modems via clean and pragmatic Python API',
author='Slawek Ligus, František Malina',
author_email='root@ooz.ie',
url='https://github.com/oozie/pyhumod',
license='BSD',
platforms=['Linux'],
install_requires=['pyserial'],
data_files=CONFIG_FILES,
classifiers=['License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Software Development :: Libraries'])
|
9108add7219d3d70ff0aab86c13cd4077cda6619
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
|
from setuptools import setup
with open('README.md', 'r', encoding='utf-8') as fh:
long_description = fh.read()
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
long_description=long_description,
long_description_content_type='text/markdown',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
|
Add long description for package
|
Add long description for package
|
Python
|
mit
|
scttcper/ingreedy-py
|
from setuptools import setup
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
Add long description for package
|
from setuptools import setup
with open('README.md', 'r', encoding='utf-8') as fh:
long_description = fh.read()
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
long_description=long_description,
long_description_content_type='text/markdown',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
|
<commit_before>from setuptools import setup
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
<commit_msg>Add long description for package<commit_after>
|
from setuptools import setup
with open('README.md', 'r', encoding='utf-8') as fh:
long_description = fh.read()
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
long_description=long_description,
long_description_content_type='text/markdown',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
|
from setuptools import setup
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
Add long description for packagefrom setuptools import setup
with open('README.md', 'r', encoding='utf-8') as fh:
long_description = fh.read()
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
long_description=long_description,
long_description_content_type='text/markdown',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
|
<commit_before>from setuptools import setup
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
<commit_msg>Add long description for package<commit_after>from setuptools import setup
with open('README.md', 'r', encoding='utf-8') as fh:
long_description = fh.read()
setup(
name='ingreedypy',
py_modules=['ingreedypy'],
version='1.3.2',
description='ingreedy-py parses recipe ingredient lines into a object',
long_description=long_description,
long_description_content_type='text/markdown',
author='Scott Cooper',
author_email='scttcper@gmail.com',
url='https://github.com/openculinary/ingreedy-py',
keywords=['ingreedy', 'ingreedypy', 'recipe', 'parser'],
install_requires=[
'parsimonious'
],
extras_require={
'tests': [
'pytest',
'pytest-cov',
]
},
classifiers=[],
)
|
d0f6ab8c4db9de6f5d8c59bdb0c19baa2e758b50
|
setup.py
|
setup.py
|
import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
|
import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider>=0.5.0',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
|
Set the min version of skosprovider to 0.5.0
|
Set the min version of skosprovider to 0.5.0
Skosprovider 0.5.0 is required because of the
ProviderUnavailableException.
|
Python
|
mit
|
OnroerendErfgoed/skosprovider_heritagedata
|
import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
Set the min version of skosprovider to 0.5.0
Skosprovider 0.5.0 is required because of the
ProviderUnavailableException.
|
import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider>=0.5.0',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
|
<commit_before>import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
<commit_msg>Set the min version of skosprovider to 0.5.0
Skosprovider 0.5.0 is required because of the
ProviderUnavailableException.<commit_after>
|
import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider>=0.5.0',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
|
import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
Set the min version of skosprovider to 0.5.0
Skosprovider 0.5.0 is required because of the
ProviderUnavailableException.import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider>=0.5.0',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
|
<commit_before>import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
<commit_msg>Set the min version of skosprovider to 0.5.0
Skosprovider 0.5.0 is required because of the
ProviderUnavailableException.<commit_after>import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider>=0.5.0',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.2.0',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider',
test_suite='nose.collector'
)
|
c2d8089559dbb448d378ba15042031f9ca18d7e8
|
setup.py
|
setup.py
|
from setuptools import setup
import sys
VERSION = "0.2.0"
if sys.version_info >= (3,):
requirements = ["websocket-client-py3"]
else:
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
|
from setuptools import setup
import sys
VERSION = "0.2.0"
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
|
Switch to websocket-client for python3
|
Switch to websocket-client for python3
|
Python
|
mit
|
bartbroere/PythonPusherClient,mattsunsjf/PythonPusherClient,ekulyk/PythonPusherClient
|
from setuptools import setup
import sys
VERSION = "0.2.0"
if sys.version_info >= (3,):
requirements = ["websocket-client-py3"]
else:
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
Switch to websocket-client for python3
|
from setuptools import setup
import sys
VERSION = "0.2.0"
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
|
<commit_before>from setuptools import setup
import sys
VERSION = "0.2.0"
if sys.version_info >= (3,):
requirements = ["websocket-client-py3"]
else:
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
<commit_msg>Switch to websocket-client for python3<commit_after>
|
from setuptools import setup
import sys
VERSION = "0.2.0"
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
|
from setuptools import setup
import sys
VERSION = "0.2.0"
if sys.version_info >= (3,):
requirements = ["websocket-client-py3"]
else:
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
Switch to websocket-client for python3from setuptools import setup
import sys
VERSION = "0.2.0"
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
|
<commit_before>from setuptools import setup
import sys
VERSION = "0.2.0"
if sys.version_info >= (3,):
requirements = ["websocket-client-py3"]
else:
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
<commit_msg>Switch to websocket-client for python3<commit_after>from setuptools import setup
import sys
VERSION = "0.2.0"
requirements = ["websocket-client"]
setup(
name="pusherclient",
version=VERSION,
description="Pusher websocket client for python",
author="Erik Kulyk",
author_email="e.kulyk@gmail.com",
license="",
url="",
install_requires=requirements,
packages=["pusherclient"],
)
|
e1e56796bd8e4f3b3833f34266155b43d8156c6e
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
|
from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'requests>=2',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
|
Add requests as an installation requirement.
|
Add requests as an installation requirement.
|
Python
|
apache-2.0
|
jbronn/django-forge,ocadotechnology/django-forge,ocadotechnology/django-forge,jbronn/django-forge
|
from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
Add requests as an installation requirement.
|
from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'requests>=2',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
|
<commit_before>from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
<commit_msg>Add requests as an installation requirement.<commit_after>
|
from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'requests>=2',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
|
from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
Add requests as an installation requirement.from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'requests>=2',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
|
<commit_before>from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
<commit_msg>Add requests as an installation requirement.<commit_after>from setuptools import setup
setup(name='django-forge',
version=__import__('forge').__version__,
author='Justin Bronn',
author_email='jbronn@gmail.com',
description='A Django implementation of the Puppet Forge web API.',
url='https://github.com/jbronn/django-forge',
download_url='http://pypi.python.org/pypi/django-forge/',
install_requires=[
'Django>=1.4',
'requests>=2',
'semantic_version>=2.1.2',
],
packages=['forge',
'forge/management',
'forge/management/commands',
'forge/tests',],
package_data={'apache': ['forge/apache'],
'templates': ['forge/templates']},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
],
)
|
b9a1a47361df09c4ef9b717afd6358aff982ecc5
|
setup.py
|
setup.py
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='''
[console_scripts]
marina=cli:main
docker-clean=docker_clean:main
{}
'''.format(get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'requests>=2.11.0,<2.12',
'docker-compose',
'configobj'
]
)
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
|
Prepare to use marina as a package
|
Prepare to use marina as a package
|
Python
|
apache-2.0
|
inetprocess/docker-lamp,inetprocess/docker-lamp,edyan/stakkr,inetprocess/docker-lamp,edyan/stakkr,edyan/stakkr
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='''
[console_scripts]
marina=cli:main
docker-clean=docker_clean:main
{}
'''.format(get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'requests>=2.11.0,<2.12',
'docker-compose',
'configobj'
]
)
Prepare to use marina as a package
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
|
<commit_before>from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='''
[console_scripts]
marina=cli:main
docker-clean=docker_clean:main
{}
'''.format(get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'requests>=2.11.0,<2.12',
'docker-compose',
'configobj'
]
)
<commit_msg>Prepare to use marina as a package<commit_after>
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='''
[console_scripts]
marina=cli:main
docker-clean=docker_clean:main
{}
'''.format(get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'requests>=2.11.0,<2.12',
'docker-compose',
'configobj'
]
)
Prepare to use marina as a packagefrom marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
|
<commit_before>from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='''
[console_scripts]
marina=cli:main
docker-clean=docker_clean:main
{}
'''.format(get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'requests>=2.11.0,<2.12',
'docker-compose',
'configobj'
]
)
<commit_msg>Prepare to use marina as a package<commit_after>from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='emmanuel.dyan@inetprocess.com',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
|
c4b1920637535b4c5844bce9d32c448697a2718f
|
setup.py
|
setup.py
|
import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':sys_platform=="linux" and python_implementation != "PyPy"': ['isal>=0.3.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
|
import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':(sys_platform=="linux" or sys_platform=="darwin") and python_implementation != "PyPy"': ['isal>=0.4.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
|
Make python-isal a requirement on macos as well.
|
Make python-isal a requirement on macos as well.
|
Python
|
mit
|
marcelm/xopen
|
import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':sys_platform=="linux" and python_implementation != "PyPy"': ['isal>=0.3.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
Make python-isal a requirement on macos as well.
|
import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':(sys_platform=="linux" or sys_platform=="darwin") and python_implementation != "PyPy"': ['isal>=0.4.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
|
<commit_before>import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':sys_platform=="linux" and python_implementation != "PyPy"': ['isal>=0.3.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
<commit_msg>Make python-isal a requirement on macos as well.<commit_after>
|
import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':(sys_platform=="linux" or sys_platform=="darwin") and python_implementation != "PyPy"': ['isal>=0.4.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
|
import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':sys_platform=="linux" and python_implementation != "PyPy"': ['isal>=0.3.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
Make python-isal a requirement on macos as well.import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':(sys_platform=="linux" or sys_platform=="darwin") and python_implementation != "PyPy"': ['isal>=0.4.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
|
<commit_before>import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':sys_platform=="linux" and python_implementation != "PyPy"': ['isal>=0.3.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
<commit_msg>Make python-isal a requirement on macos as well.<commit_after>import sys
from setuptools import setup, find_packages
with open('README.rst') as f:
long_description = f.read()
setup(
name='xopen',
use_scm_version={'write_to': 'src/xopen/_version.py'},
setup_requires=['setuptools_scm'], # Support pip versions that don't know about pyproject.toml
author='Marcel Martin',
author_email='mail@marcelm.net',
url='https://github.com/marcelm/xopen/',
description='Open compressed files transparently',
long_description=long_description,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
package_data={"xopen": ["py.typed"]},
extras_require={
'dev': ['pytest'],
':(sys_platform=="linux" or sys_platform=="darwin") and python_implementation != "PyPy"': ['isal>=0.4.0']
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
]
)
|
4b95e714a8b7c4c0aaf57f8d7de6769aa688de04
|
setup.py
|
setup.py
|
"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library.',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
Remove period after package description
|
Remove period after package description
|
Python
|
mit
|
bcb/jsonrpcclient
|
"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library.',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Remove period after package description
|
"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library.',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Remove period after package description<commit_after>
|
"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library.',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Remove period after package description"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library.',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Remove period after package description<commit_after>"""setup.py"""
#pylint:disable=line-too-long
from setuptools import setup
from codecs import open as codecs_open
with codecs_open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with codecs_open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='jsonrpcclient',
version='2.0.1',
description='JSON-RPC client library',
long_description=readme + '\n\n' + history,
author='Beau Barker',
author_email='beauinmelbourne@gmail.com',
url='https://jsonrpcclient.readthedocs.org/',
packages=['jsonrpcclient'],
package_data={'jsonrpcclient': ['response-schema.json']},
include_package_data=True,
install_requires=['jsonschema', 'future'],
tests_require=['tox'],
classifiers=[
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
b0aaedf758e49dbc0b74a9a11ae9dbd424ad401c
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api==0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api>=0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
Update to less strict version requirement for go_api
|
Update to less strict version requirement for go_api
|
Python
|
bsd-3-clause
|
praekelt/go-contacts-api,praekelt/go-contacts-api
|
from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api==0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
Update to less strict version requirement for go_api
|
from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api>=0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api==0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Update to less strict version requirement for go_api<commit_after>
|
from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api>=0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api==0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
Update to less strict version requirement for go_apifrom setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api>=0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api==0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Update to less strict version requirement for go_api<commit_after>from setuptools import setup, find_packages
setup(
name="go-contacts",
version="0.1.5a",
url='http://github.com/praekelt/go-contacts-api',
license='BSD',
description="A contacts and groups API for Vumi Go",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'cyclone',
'go_api>=0.3.0',
'vumi>=0.5.2',
'vumi-go',
'confmodel==0.2.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
63e7c8782c179be3aa003c70ccf1bb7dcf24a39c
|
setup.py
|
setup.py
|
from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="http://www.awarepixel.com",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
|
from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="https://github.com/carlospalol/django-moneyfield",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
|
Use github page as homepage
|
Use github page as homepage
|
Python
|
mit
|
generalov/django-moneyfield,carlospalol/django-moneyfield
|
from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="http://www.awarepixel.com",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
Use github page as homepage
|
from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="https://github.com/carlospalol/django-moneyfield",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
|
<commit_before>from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="http://www.awarepixel.com",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
<commit_msg>Use github page as homepage<commit_after>
|
from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="https://github.com/carlospalol/django-moneyfield",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
|
from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="http://www.awarepixel.com",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
Use github page as homepagefrom distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="https://github.com/carlospalol/django-moneyfield",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
|
<commit_before>from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="http://www.awarepixel.com",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
<commit_msg>Use github page as homepage<commit_after>from distutils.core import setup
DESCRIPTION="""
"""
setup(
name="django-moneyfield",
description="Django Money Model Field",
long_description=DESCRIPTION,
version="0.2",
author="Carlos Palol",
author_email="carlos.palol@awarepixel.com",
url="https://github.com/carlospalol/django-moneyfield",
packages=[
'moneyfield'
],
requires=[
'django (>=1.5)',
'money',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries',
]
)
|
343de22958293ceedc3d86ecd9ebd97bf5747d55
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['https://github.com/Lispython/pycurl/archive/master.zip',
'https://github.com/Lispython/human_curl/archive/master.zip'],
#install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
|
Define explicit url for required package deps
|
Define explicit url for required package deps
|
Python
|
apache-2.0
|
bananos/adm-client
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
Define explicit url for required package deps
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['https://github.com/Lispython/pycurl/archive/master.zip',
'https://github.com/Lispython/human_curl/archive/master.zip'],
#install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
<commit_msg>Define explicit url for required package deps<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['https://github.com/Lispython/pycurl/archive/master.zip',
'https://github.com/Lispython/human_curl/archive/master.zip'],
#install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
Define explicit url for required package depstry:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['https://github.com/Lispython/pycurl/archive/master.zip',
'https://github.com/Lispython/human_curl/archive/master.zip'],
#install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
<commit_msg>Define explicit url for required package deps<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='adm-client',
version='0.1',
author='Andrew Druchenko',
author_email='bananos@dev.co.ua',
url='',
description='Python client for Amazon Device Messaging (ADM)',
long_description=open('README.rst').read(),
packages=['admclient'],
license="Apache 2.0",
keywords='adm push notification amazon device messaging android',
install_requires=['https://github.com/Lispython/pycurl/archive/master.zip',
'https://github.com/Lispython/human_curl/archive/master.zip'],
#install_requires=['human_curl'],
classifiers = [ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules']
)
|
e15cf381307d79d227f4f0fab94f731591f6f639
|
setup.py
|
setup.py
|
import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = 'requests'
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
|
import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = ['requests']
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
|
Make install_requires into an array
|
Make install_requires into an array
|
Python
|
mit
|
zenmeso/stripe-python,HashNuke/stripe-python,koobs/stripe-python,alexmic/stripe-python,speedplane/stripe-python,uploadcare/stripe-python,Khan/stripe-python,stripe/stripe-python,NahomAgidew/stripe-python,woodb/stripe-python
|
import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = 'requests'
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
Make install_requires into an array
|
import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = ['requests']
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
|
<commit_before>import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = 'requests'
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
<commit_msg>Make install_requires into an array<commit_after>
|
import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = ['requests']
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
|
import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = 'requests'
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
Make install_requires into an arrayimport os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = ['requests']
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
|
<commit_before>import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = 'requests'
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
<commit_msg>Make install_requires into an array<commit_after>import os
import sys
from distutils.core import setup
# Don't import stripe module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'stripe'))
import importer
import version
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))
# Get simplejson if we don't already have json
install_requires = ['requests']
try:
importer.import_json()
except ImportError:
install_requires.append('simplejson')
try:
import json
_json_loaded = hasattr(json, 'loads')
except ImportError:
pass
setup(name='stripe',
version=version.VERSION,
description='Stripe python bindings',
author='Stripe',
author_email='support@stripe.com',
url='https://stripe.com/',
packages=['stripe'],
package_data={'stripe' : ['data/ca-certificates.crt', '../VERSION']},
install_requires=install_requires
)
|
f0811d8dd5a6e2f43c6821b7e827810106719b6e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests>=0.11.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests==0.14.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
|
Use requests 0.14.1 from now on.
|
Use requests 0.14.1 from now on.
|
Python
|
bsd-2-clause
|
aolieman/pyspotlight,ubergrape/pyspotlight
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests>=0.11.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
Use requests 0.14.1 from now on.
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests==0.14.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests>=0.11.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
<commit_msg>Use requests 0.14.1 from now on.<commit_after>
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests==0.14.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests>=0.11.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
Use requests 0.14.1 from now on.#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests==0.14.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests>=0.11.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
<commit_msg>Use requests 0.14.1 from now on.<commit_after>#!/usr/bin/env python
# coding: utf-8
from setuptools import setup
from setuptools import find_packages
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Environment :: Web Environment",
"License :: OSI Approved :: BSD License",
"Development Status :: 5 - Production/Stable",
]
requires = ["requests==0.14.1", ]
# This might not be the best idea.
try:
import json
except ImportError:
requires.append('simplejson>=2.0')
setup(name='pyspotlight',
version='0.5.3',
license='BSD',
url='https://github.com/newsgrape/pyspotlight',
packages=find_packages(),
description='Python interface to the DBPedia Spotlight REST API',
long_description=open('README.rst').read(),
keywords="dbpedia spotlight semantic",
classifiers=classifiers,
install_requires=requires,
)
|
ec1cff26d48b9fac3dd9bab2e33b17d3faea67e8
|
setup.py
|
setup.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'pyramid',
'six',
'py_zipkin',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
|
Sort package list in required installs
|
Sort package list in required installs
|
Python
|
apache-2.0
|
bplotnick/pyramid_zipkin,Yelp/pyramid_zipkin
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'pyramid',
'six',
'py_zipkin',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
Sort package list in required installs
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'pyramid',
'six',
'py_zipkin',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
<commit_msg>Sort package list in required installs<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'pyramid',
'six',
'py_zipkin',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
Sort package list in required installs#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'pyramid',
'six',
'py_zipkin',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
<commit_msg>Sort package list in required installs<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = "0.13.0"
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
)
|
f36cc2d92c25d0a79b94647e64c26e74f44cf0da
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
package_data={'': ['LICENSE']},
include_package_data=True,
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
Add LICENSE to package data
|
Add LICENSE to package data
The LICENSE file isn't included with the version found on PyPI. Including it in the `package_data` argument passed to `setup` should fix this.
|
Python
|
bsd-3-clause
|
dirn/switches,dirn/switches
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
Add LICENSE to package data
The LICENSE file isn't included with the version found on PyPI. Including it in the `package_data` argument passed to `setup` should fix this.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
package_data={'': ['LICENSE']},
include_package_data=True,
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
<commit_msg>Add LICENSE to package data
The LICENSE file isn't included with the version found on PyPI. Including it in the `package_data` argument passed to `setup` should fix this.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
package_data={'': ['LICENSE']},
include_package_data=True,
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
Add LICENSE to package data
The LICENSE file isn't included with the version found on PyPI. Including it in the `package_data` argument passed to `setup` should fix this.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
package_data={'': ['LICENSE']},
include_package_data=True,
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
<commit_msg>Add LICENSE to package data
The LICENSE file isn't included with the version found on PyPI. Including it in the `package_data` argument passed to `setup` should fix this.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='switches',
version='0.1.0',
description='Friendly Command Line Scripts',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/switches',
py_modules=['switches'],
package_data={'': ['LICENSE']},
include_package_data=True,
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
55afba02b9ec0de224144ed505590dffea836598
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
|
#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe', 'yaml'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
|
Add yaml to requires list
|
Add yaml to requires list
|
Python
|
mit
|
tlvince/periodical
|
#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
Add yaml to requires list
|
#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe', 'yaml'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
|
<commit_before>#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
<commit_msg>Add yaml to requires list<commit_after>
|
#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe', 'yaml'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
|
#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
Add yaml to requires list#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe', 'yaml'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
|
<commit_before>#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
<commit_msg>Add yaml to requires list<commit_after>#!/usr/bin/env python2
from distutils.core import setup
setup(
name = 'periodical',
description = 'Create a Kindle periodical from given URL(s)',
version = '0.1.0',
author = 'Tom Vincent',
author_email = 'http://tlvince.com/contact/',
url = 'https://github.com/tlvince/periodical',
license = 'MIT',
scripts = ['src/periodical.py'],
requires = ['beautifulsoup4', 'boilerpipe', 'yaml'],
classifiers = [
'Programming Language :: Python :: 2',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business',
'Topic :: Text Processing :: Markup :: HTML',
]
)
|
7f5186caa6b59df412d62b052406dbe675b9e463
|
OpenSearchInNewTab.py
|
OpenSearchInNewTab.py
|
import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == ALT_NAME:
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
|
import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if self.is_search_view(view):
self.apply_alt_name(view)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if self.is_search_view(view):
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if self.is_search_view(view):
self.apply_alt_name(view)
def apply_alt_name(self, view):
view.set_name(ALT_NAME)
def is_search_view(self, view):
name = view.name()
return name == ALT_NAME or name == DEFAULT_NAME
|
Refactor API to a more readable form
|
Refactor API to a more readable form
|
Python
|
mit
|
everyonesdesign/OpenSearchInNewTab
|
import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == ALT_NAME:
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
Refactor API to a more readable form
|
import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if self.is_search_view(view):
self.apply_alt_name(view)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if self.is_search_view(view):
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if self.is_search_view(view):
self.apply_alt_name(view)
def apply_alt_name(self, view):
view.set_name(ALT_NAME)
def is_search_view(self, view):
name = view.name()
return name == ALT_NAME or name == DEFAULT_NAME
|
<commit_before>import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == ALT_NAME:
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
<commit_msg>Refactor API to a more readable form<commit_after>
|
import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if self.is_search_view(view):
self.apply_alt_name(view)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if self.is_search_view(view):
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if self.is_search_view(view):
self.apply_alt_name(view)
def apply_alt_name(self, view):
view.set_name(ALT_NAME)
def is_search_view(self, view):
name = view.name()
return name == ALT_NAME or name == DEFAULT_NAME
|
import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == ALT_NAME:
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
Refactor API to a more readable formimport sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if self.is_search_view(view):
self.apply_alt_name(view)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if self.is_search_view(view):
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if self.is_search_view(view):
self.apply_alt_name(view)
def apply_alt_name(self, view):
view.set_name(ALT_NAME)
def is_search_view(self, view):
name = view.name()
return name == ALT_NAME or name == DEFAULT_NAME
|
<commit_before>import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == ALT_NAME:
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if view.name() == DEFAULT_NAME:
view.set_name(ALT_NAME)
<commit_msg>Refactor API to a more readable form<commit_after>import sublime_plugin
DEFAULT_NAME = 'Find Results'
ALT_NAME = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if self.is_search_view(view):
self.apply_alt_name(view)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if self.is_search_view(view):
view.set_name(DEFAULT_NAME)
def post_text_command(self, view, command_name, args):
if self.is_search_view(view):
self.apply_alt_name(view)
def apply_alt_name(self, view):
view.set_name(ALT_NAME)
def is_search_view(self, view):
name = view.name()
return name == ALT_NAME or name == DEFAULT_NAME
|
ad1ac318d87a16aab0b55e3ccd238c769b1f3e0a
|
setup.py
|
setup.py
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,metadata,xml,parser',
version='0.2.3',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,ISO-19139,metadata,xml,parser',
version='0.3.0',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
Tag increment and new keyword.
|
Tag increment and new keyword.
|
Python
|
bsd-3-clause
|
consbio/gis-metadata-parser
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,metadata,xml,parser',
version='0.2.3',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
Tag increment and new keyword.
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,ISO-19139,metadata,xml,parser',
version='0.3.0',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,metadata,xml,parser',
version='0.2.3',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Tag increment and new keyword.<commit_after>
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,ISO-19139,metadata,xml,parser',
version='0.3.0',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,metadata,xml,parser',
version='0.2.3',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
Tag increment and new keyword.import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,ISO-19139,metadata,xml,parser',
version='0.3.0',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,metadata,xml,parser',
version='0.2.3',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Tag increment and new keyword.<commit_after>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
setup(
name='gis_metadata_parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
keywords='fgdc,iso,ISO-19115,ISO-19139,metadata,xml,parser',
version='0.3.0',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'parserutils', 'six'
],
url='https://github.com/consbio/gis_metadata_parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
7fbd455206de035b2deb46082d7f331e1a87e806
|
setup.py
|
setup.py
|
import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.1",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
|
import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.2",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
|
Bump tensorflow from 2.5.1 to 2.5.2
|
Bump tensorflow from 2.5.1 to 2.5.2
Bumps [tensorflow](https://github.com/tensorflow/tensorflow) from 2.5.1 to 2.5.2.
- [Release notes](https://github.com/tensorflow/tensorflow/releases)
- [Changelog](https://github.com/tensorflow/tensorflow/blob/master/RELEASE.md)
- [Commits](https://github.com/tensorflow/tensorflow/compare/v2.5.1...v2.5.2)
---
updated-dependencies:
- dependency-name: tensorflow
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
|
Python
|
apache-2.0
|
google/microscopeimagequality,google/microscopeimagequality
|
import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.1",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
Bump tensorflow from 2.5.1 to 2.5.2
Bumps [tensorflow](https://github.com/tensorflow/tensorflow) from 2.5.1 to 2.5.2.
- [Release notes](https://github.com/tensorflow/tensorflow/releases)
- [Changelog](https://github.com/tensorflow/tensorflow/blob/master/RELEASE.md)
- [Commits](https://github.com/tensorflow/tensorflow/compare/v2.5.1...v2.5.2)
---
updated-dependencies:
- dependency-name: tensorflow
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
|
import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.2",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
|
<commit_before>import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.1",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
<commit_msg>Bump tensorflow from 2.5.1 to 2.5.2
Bumps [tensorflow](https://github.com/tensorflow/tensorflow) from 2.5.1 to 2.5.2.
- [Release notes](https://github.com/tensorflow/tensorflow/releases)
- [Changelog](https://github.com/tensorflow/tensorflow/blob/master/RELEASE.md)
- [Commits](https://github.com/tensorflow/tensorflow/compare/v2.5.1...v2.5.2)
---
updated-dependencies:
- dependency-name: tensorflow
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com><commit_after>
|
import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.2",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
|
import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.1",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
Bump tensorflow from 2.5.1 to 2.5.2
Bumps [tensorflow](https://github.com/tensorflow/tensorflow) from 2.5.1 to 2.5.2.
- [Release notes](https://github.com/tensorflow/tensorflow/releases)
- [Changelog](https://github.com/tensorflow/tensorflow/blob/master/RELEASE.md)
- [Commits](https://github.com/tensorflow/tensorflow/compare/v2.5.1...v2.5.2)
---
updated-dependencies:
- dependency-name: tensorflow
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.2",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
|
<commit_before>import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.1",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
<commit_msg>Bump tensorflow from 2.5.1 to 2.5.2
Bumps [tensorflow](https://github.com/tensorflow/tensorflow) from 2.5.1 to 2.5.2.
- [Release notes](https://github.com/tensorflow/tensorflow/releases)
- [Changelog](https://github.com/tensorflow/tensorflow/blob/master/RELEASE.md)
- [Commits](https://github.com/tensorflow/tensorflow/compare/v2.5.1...v2.5.2)
---
updated-dependencies:
- dependency-name: tensorflow
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com><commit_after>import setuptools
setuptools.setup(
python_requires='<3.8',
entry_points={
"console_scripts": [
"microscopeimagequality=microscopeimagequality.application:command"
]
},
install_requires=[
"click",
"matplotlib",
"nose",
"numpy<1.19.0,>=1.16.0",
"Pillow",
"scikit-image",
"scipy",
"six",
"tensorflow==2.5.2",
"imagecodecs",
],
test_requires=["pytest"],
name="microscopeimagequality",
package_data={
"microscopeimagequality": [
"data/"
]
},
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'],
description="Microscope Image Quality Classification",
url='https://github.com/google/microscopeimagequality',
author='Samuel Yang',
author_email='samuely@google.com',
license='Apache 2.0',
packages=setuptools.find_packages(
exclude=[
"tests"
]
),
version="0.1.0dev5"
)
|
fd1a0850f9c4c5c34accf64af47ac9bbf25faf74
|
setup.py
|
setup.py
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
]
)
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
|
Add aioauth-client into package install_requires
|
Add aioauth-client into package install_requires
|
Python
|
mit
|
dvhbru/dvhb-hybrid
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
]
)
Add aioauth-client into package install_requires
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
|
<commit_before>import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
]
)
<commit_msg>Add aioauth-client into package install_requires<commit_after>
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
|
import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
]
)
Add aioauth-client into package install_requiresimport re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
|
<commit_before>import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
]
)
<commit_msg>Add aioauth-client into package install_requires<commit_after>import re
from pathlib import Path
from setuptools import setup, find_packages
with Path(__file__).with_name('dvhb_hybrid').joinpath('__init__.py').open() as f:
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(f.read()).group(1)
setup(
name='dvhb-hybrid',
version=VERSION,
description='',
author='Malev A',
author_email='am@dvhb.ru',
url='https://github.com/dvhbru/dvhb-hybrid',
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
'Framework :: Aiohttp',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'django',
'psycopg2',
'aiopg',
'aioworkers',
'aiohttp_apiset',
'sqlalchemy',
'pyyaml',
'Pillow',
'Babel',
'aioauth-client',
]
)
|
39acd88ede1ea4beae22fc7596a9b886554af0b2
|
setup.py
|
setup.py
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.0',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
Increment version for minor fix.
|
Increment version for minor fix.
|
Python
|
bsd-3-clause
|
consbio/parserutils
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.0',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
Increment version for minor fix.
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.0',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Increment version for minor fix.<commit_after>
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.0',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
Increment version for minor fix.import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.0',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Increment version for minor fix.<commit_after>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
setup(
name='parserutils',
description='A collection of performant parsing utilities',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='0.7.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml', 'python-dateutil', 'six'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
6069605bbfff4edbb57562f27ce9fa5d7af6a3b7
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
|
from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
url = "http://github.com/Jc2k/buildbot_travis",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
|
Fix url metadata so zest works
|
Fix url metadata so zest works
|
Python
|
unknown
|
tardyp/buildbot_travis,buildbot/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis,isotoma/buildbot_travis,isotoma/buildbot_travis,buildbot/buildbot_travis,tardyp/buildbot_travis,tardyp/buildbot_travis
|
from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
Fix url metadata so zest works
|
from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
url = "http://github.com/Jc2k/buildbot_travis",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
<commit_msg>Fix url metadata so zest works<commit_after>
|
from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
url = "http://github.com/Jc2k/buildbot_travis",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
|
from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
Fix url metadata so zest worksfrom setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
url = "http://github.com/Jc2k/buildbot_travis",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
<commit_msg>Fix url metadata so zest works<commit_after>from setuptools import setup, find_packages
version = '0.0.7dev'
setup(
name = 'buildbot_travis',
version = version,
description = "Adapt buildbot to work a little more like Travis.",
keywords = "buildbot travis ci",
url = "http://github.com/Jc2k/buildbot_travis",
author = "John Carr",
author_email = "john.carr@unrouted.co.uk",
license="Apache Software License",
packages = find_packages(exclude=['ez_setup']),
include_package_data = True,
zip_safe = False,
install_requires = [
'setuptools',
'buildbot',
'PyYAML',
],
)
|
315fdda95bc9c8e967033fa2ec1981cc44a6feab
|
setup.py
|
setup.py
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.1',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.2',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
|
Fix release version to 0.4.2
|
Fix release version to 0.4.2
|
Python
|
unlicense
|
OrganicIrradiation/scholarly
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.1',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
Fix release version to 0.4.2
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.2',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
|
<commit_before>import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.1',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
<commit_msg>Fix release version to 0.4.2<commit_after>
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.2',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.1',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
Fix release version to 0.4.2import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.2',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
|
<commit_before>import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.1',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
<commit_msg>Fix release version to 0.4.2<commit_after>import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='scholarly',
version='0.4.2',
author='Steven A. Cholewiak, Panos Ipeirotis, Victor Silva',
author_email='steven@cholewiak.com, panos@stern.nyu.edu, vsilva@ualberta.ca',
description='Simple access to Google Scholar authors and citations',
long_description=long_description,
long_description_content_type="text/markdown",
license='Unlicense',
url='https://github.com/scholarly-python-package/scholarly',
packages=setuptools.find_packages(),
keywords=['Google Scholar', 'academics', 'citations'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules'],
install_requires=['arrow',
'beautifulsoup4',
'bibtexparser',
'requests[security]',
'requests[socks]',
'stem',
'fake_useragent',
'PySocks',
'selenium',
'python-dotenv',
'free-proxy',
],
test_suite="test_module.py"
)
|
94547fc8554c7e193ceb4fe281ea72ffd2a0dd3a
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
sys.path += [x for x in os.listdir(".") if x.endswith(".egg")]
import d2to1 # flake8: noqa
except ImportError:
import subprocess
if not subprocess.call(
[sys.executable] +
"-m pip.__init__ install distribute<0.7 d2to1>=0.2.10,<0.3".split()
):
sys.exit(subprocess.call([sys.executable] + sys.argv))
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
|
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
|
Remove explicit depend on distribute.
|
Remove explicit depend on distribute.
Things in the world are moving towards setuptools 0.7, and
there is not a path between distribute and setuptools. Our explicit
dependency on setuptools is causing us to have to write patches to
try to jump through additional hoops to get it to install in the
right contexts.
Fixes bug 1189941
Change-Id: Id7a749c02203100dab52160a551d3548c8a48fd6
|
Python
|
apache-2.0
|
openstack-attic/oslo.version,emonty/oslo.version
|
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
sys.path += [x for x in os.listdir(".") if x.endswith(".egg")]
import d2to1 # flake8: noqa
except ImportError:
import subprocess
if not subprocess.call(
[sys.executable] +
"-m pip.__init__ install distribute<0.7 d2to1>=0.2.10,<0.3".split()
):
sys.exit(subprocess.call([sys.executable] + sys.argv))
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
Remove explicit depend on distribute.
Things in the world are moving towards setuptools 0.7, and
there is not a path between distribute and setuptools. Our explicit
dependency on setuptools is causing us to have to write patches to
try to jump through additional hoops to get it to install in the
right contexts.
Fixes bug 1189941
Change-Id: Id7a749c02203100dab52160a551d3548c8a48fd6
|
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
sys.path += [x for x in os.listdir(".") if x.endswith(".egg")]
import d2to1 # flake8: noqa
except ImportError:
import subprocess
if not subprocess.call(
[sys.executable] +
"-m pip.__init__ install distribute<0.7 d2to1>=0.2.10,<0.3".split()
):
sys.exit(subprocess.call([sys.executable] + sys.argv))
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
<commit_msg>Remove explicit depend on distribute.
Things in the world are moving towards setuptools 0.7, and
there is not a path between distribute and setuptools. Our explicit
dependency on setuptools is causing us to have to write patches to
try to jump through additional hoops to get it to install in the
right contexts.
Fixes bug 1189941
Change-Id: Id7a749c02203100dab52160a551d3548c8a48fd6<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
|
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
sys.path += [x for x in os.listdir(".") if x.endswith(".egg")]
import d2to1 # flake8: noqa
except ImportError:
import subprocess
if not subprocess.call(
[sys.executable] +
"-m pip.__init__ install distribute<0.7 d2to1>=0.2.10,<0.3".split()
):
sys.exit(subprocess.call([sys.executable] + sys.argv))
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
Remove explicit depend on distribute.
Things in the world are moving towards setuptools 0.7, and
there is not a path between distribute and setuptools. Our explicit
dependency on setuptools is causing us to have to write patches to
try to jump through additional hoops to get it to install in the
right contexts.
Fixes bug 1189941
Change-Id: Id7a749c02203100dab52160a551d3548c8a48fd6#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
sys.path += [x for x in os.listdir(".") if x.endswith(".egg")]
import d2to1 # flake8: noqa
except ImportError:
import subprocess
if not subprocess.call(
[sys.executable] +
"-m pip.__init__ install distribute<0.7 d2to1>=0.2.10,<0.3".split()
):
sys.exit(subprocess.call([sys.executable] + sys.argv))
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
<commit_msg>Remove explicit depend on distribute.
Things in the world are moving towards setuptools 0.7, and
there is not a path between distribute and setuptools. Our explicit
dependency on setuptools is causing us to have to write patches to
try to jump through additional hoops to get it to install in the
right contexts.
Fixes bug 1189941
Change-Id: Id7a749c02203100dab52160a551d3548c8a48fd6<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['d2to1>=0.2.10,<0.3'],
d2to1=True)
|
48f2618c9fca47e281b4aa52881e050166aefb10
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description="",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
|
#!/usr/bin/env python
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description=long_description,
long_description_content_type='text/markdown',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
|
Use README.md as long description
|
Use README.md as long description
|
Python
|
mit
|
lebauce/hashmerge
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description="",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
Use README.md as long description
|
#!/usr/bin/env python
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description=long_description,
long_description_content_type='text/markdown',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description="",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
<commit_msg>Use README.md as long description<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description=long_description,
long_description_content_type='text/markdown',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description="",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
Use README.md as long description#!/usr/bin/env python
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description=long_description,
long_description_content_type='text/markdown',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description="",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
<commit_msg>Use README.md as long description<commit_after>#!/usr/bin/env python
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='hashmerge',
version='0.1',
url='https://github.com/lebauce/hashmerge',
author='Sylvain Baubeau',
author_email='bob@glumol.com',
description="Merges two arbitrarily deep hashes into a single hash.",
license='MIT',
include_package_data=False,
zip_safe=False,
py_modules=['hashmerge'],
long_description=long_description,
long_description_content_type='text/markdown',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries'],
)
|
a759875e123dcbd37f3eb21993409397f818f6c6
|
src/pudl/metadata/__init__.py
|
src/pudl/metadata/__init__.py
|
"""Metadata constants and methods."""
from .classes import Resource
from .resources import RESOURCES
RESOURCES = {name: Resource.from_id(name) for name in RESOURCES}
|
"""Metadata constants and methods."""
import pydantic
from . import resources
from .classes import Resource
RESOURCES = {}
errors = []
for name in resources.RESOURCES:
try:
RESOURCES[name] = Resource.from_id(name)
except pydantic.ValidationError as error:
errors.append("\n" + f"[{name}] {error}")
if errors:
raise ValueError("".join(errors))
|
Print all resource validation errors
|
Print all resource validation errors
|
Python
|
mit
|
catalyst-cooperative/pudl,catalyst-cooperative/pudl
|
"""Metadata constants and methods."""
from .classes import Resource
from .resources import RESOURCES
RESOURCES = {name: Resource.from_id(name) for name in RESOURCES}
Print all resource validation errors
|
"""Metadata constants and methods."""
import pydantic
from . import resources
from .classes import Resource
RESOURCES = {}
errors = []
for name in resources.RESOURCES:
try:
RESOURCES[name] = Resource.from_id(name)
except pydantic.ValidationError as error:
errors.append("\n" + f"[{name}] {error}")
if errors:
raise ValueError("".join(errors))
|
<commit_before>"""Metadata constants and methods."""
from .classes import Resource
from .resources import RESOURCES
RESOURCES = {name: Resource.from_id(name) for name in RESOURCES}
<commit_msg>Print all resource validation errors<commit_after>
|
"""Metadata constants and methods."""
import pydantic
from . import resources
from .classes import Resource
RESOURCES = {}
errors = []
for name in resources.RESOURCES:
try:
RESOURCES[name] = Resource.from_id(name)
except pydantic.ValidationError as error:
errors.append("\n" + f"[{name}] {error}")
if errors:
raise ValueError("".join(errors))
|
"""Metadata constants and methods."""
from .classes import Resource
from .resources import RESOURCES
RESOURCES = {name: Resource.from_id(name) for name in RESOURCES}
Print all resource validation errors"""Metadata constants and methods."""
import pydantic
from . import resources
from .classes import Resource
RESOURCES = {}
errors = []
for name in resources.RESOURCES:
try:
RESOURCES[name] = Resource.from_id(name)
except pydantic.ValidationError as error:
errors.append("\n" + f"[{name}] {error}")
if errors:
raise ValueError("".join(errors))
|
<commit_before>"""Metadata constants and methods."""
from .classes import Resource
from .resources import RESOURCES
RESOURCES = {name: Resource.from_id(name) for name in RESOURCES}
<commit_msg>Print all resource validation errors<commit_after>"""Metadata constants and methods."""
import pydantic
from . import resources
from .classes import Resource
RESOURCES = {}
errors = []
for name in resources.RESOURCES:
try:
RESOURCES[name] = Resource.from_id(name)
except pydantic.ValidationError as error:
errors.append("\n" + f"[{name}] {error}")
if errors:
raise ValueError("".join(errors))
|
36307802a45f94cb218ce9bbe4a4abc7704a973a
|
graphics/savefig.py
|
graphics/savefig.py
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,**kwargs)
else:
plt.savefig(final_filename,**kwargs)
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
|
Save figures with white space cropped out
|
Save figures with white space cropped out
|
Python
|
mit
|
joelfrederico/SciSalt
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,**kwargs)
else:
plt.savefig(final_filename,**kwargs)
Save figures with white space cropped out
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
|
<commit_before>import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,**kwargs)
else:
plt.savefig(final_filename,**kwargs)
<commit_msg>Save figures with white space cropped out<commit_after>
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,**kwargs)
else:
plt.savefig(final_filename,**kwargs)
Save figures with white space cropped outimport os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
|
<commit_before>import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,**kwargs)
else:
plt.savefig(final_filename,**kwargs)
<commit_msg>Save figures with white space cropped out<commit_after>import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
|
e6a7548546b690118537ae2a52b63d39eea6580f
|
graphiter/models.py
|
graphiter/models.py
|
from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
|
from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(
max_length=50,
default=u"-24h",
help_text=u"The default 'from' parameter to use for all charts on this page. Can be overridden via GET param when viewing the Page.",
)
time_until = models.CharField(
max_length=50,
default=u"",
blank=True,
help_text=u"The default 'until' parameter to use for all charts on this page. Can be overridden via GET param when viewing this Page.",
)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
|
Add help_text for time_from and time_until
|
Add help_text for time_from and time_until
|
Python
|
bsd-2-clause
|
jwineinger/django-graphiter
|
from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
Add help_text for time_from and time_until
|
from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(
max_length=50,
default=u"-24h",
help_text=u"The default 'from' parameter to use for all charts on this page. Can be overridden via GET param when viewing the Page.",
)
time_until = models.CharField(
max_length=50,
default=u"",
blank=True,
help_text=u"The default 'until' parameter to use for all charts on this page. Can be overridden via GET param when viewing this Page.",
)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
|
<commit_before>from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
<commit_msg>Add help_text for time_from and time_until<commit_after>
|
from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(
max_length=50,
default=u"-24h",
help_text=u"The default 'from' parameter to use for all charts on this page. Can be overridden via GET param when viewing the Page.",
)
time_until = models.CharField(
max_length=50,
default=u"",
blank=True,
help_text=u"The default 'until' parameter to use for all charts on this page. Can be overridden via GET param when viewing this Page.",
)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
|
from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
Add help_text for time_from and time_untilfrom django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(
max_length=50,
default=u"-24h",
help_text=u"The default 'from' parameter to use for all charts on this page. Can be overridden via GET param when viewing the Page.",
)
time_until = models.CharField(
max_length=50,
default=u"",
blank=True,
help_text=u"The default 'until' parameter to use for all charts on this page. Can be overridden via GET param when viewing this Page.",
)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
|
<commit_before>from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
<commit_msg>Add help_text for time_from and time_until<commit_after>from django.db import models
from django.core.urlresolvers import reverse
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(
max_length=50,
default=u"-24h",
help_text=u"The default 'from' parameter to use for all charts on this page. Can be overridden via GET param when viewing the Page.",
)
time_until = models.CharField(
max_length=50,
default=u"",
blank=True,
help_text=u"The default 'until' parameter to use for all charts on this page. Can be overridden via GET param when viewing this Page.",
)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('page_detail', kwargs={'slug': self.slug})
|
12f63ec4224185fc03176995d2cfc00c46c2ace3
|
MoveByLinesCommand.py
|
MoveByLinesCommand.py
|
import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def run(self, edit, forward=True, extend=False, number_of_lines=1):
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
|
import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def is_selection_in_sight(self):
sel = self.view.sel()[0]
visible_region = self.view.visible_region()
in_sight = visible_region.intersects(sel) or visible_region.contains(sel)
return in_sight
def run(self, edit, forward=True, extend=False, number_of_lines=1):
# Handle the special case where the view was scrolled away from the current location of
# the cursor. Trying to move the cursor will cause the view to scroll back to the current
# position of the cursor first. Bring the cursor into the current view, and then start
# moving it from there. Only do this if multiple selections are not present, just in case
# I'm trying to do something funky there.
if len(self.view.sel()) == 1 and not self.is_selection_in_sight():
visible_region = self.view.visible_region()
self.view.sel().clear()
if forward:
pos = visible_region.begin()
else:
pos = visible_region.end()
self.view.sel().add(sublime.Region(pos, pos))
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
|
Fix annoying issue with view scrolling
|
Fix annoying issue with view scrolling
|
Python
|
unlicense
|
rahul-ramadas/BagOfTricks
|
import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def run(self, edit, forward=True, extend=False, number_of_lines=1):
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
Fix annoying issue with view scrolling
|
import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def is_selection_in_sight(self):
sel = self.view.sel()[0]
visible_region = self.view.visible_region()
in_sight = visible_region.intersects(sel) or visible_region.contains(sel)
return in_sight
def run(self, edit, forward=True, extend=False, number_of_lines=1):
# Handle the special case where the view was scrolled away from the current location of
# the cursor. Trying to move the cursor will cause the view to scroll back to the current
# position of the cursor first. Bring the cursor into the current view, and then start
# moving it from there. Only do this if multiple selections are not present, just in case
# I'm trying to do something funky there.
if len(self.view.sel()) == 1 and not self.is_selection_in_sight():
visible_region = self.view.visible_region()
self.view.sel().clear()
if forward:
pos = visible_region.begin()
else:
pos = visible_region.end()
self.view.sel().add(sublime.Region(pos, pos))
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
|
<commit_before>import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def run(self, edit, forward=True, extend=False, number_of_lines=1):
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
<commit_msg>Fix annoying issue with view scrolling<commit_after>
|
import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def is_selection_in_sight(self):
sel = self.view.sel()[0]
visible_region = self.view.visible_region()
in_sight = visible_region.intersects(sel) or visible_region.contains(sel)
return in_sight
def run(self, edit, forward=True, extend=False, number_of_lines=1):
# Handle the special case where the view was scrolled away from the current location of
# the cursor. Trying to move the cursor will cause the view to scroll back to the current
# position of the cursor first. Bring the cursor into the current view, and then start
# moving it from there. Only do this if multiple selections are not present, just in case
# I'm trying to do something funky there.
if len(self.view.sel()) == 1 and not self.is_selection_in_sight():
visible_region = self.view.visible_region()
self.view.sel().clear()
if forward:
pos = visible_region.begin()
else:
pos = visible_region.end()
self.view.sel().add(sublime.Region(pos, pos))
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
|
import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def run(self, edit, forward=True, extend=False, number_of_lines=1):
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
Fix annoying issue with view scrollingimport sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def is_selection_in_sight(self):
sel = self.view.sel()[0]
visible_region = self.view.visible_region()
in_sight = visible_region.intersects(sel) or visible_region.contains(sel)
return in_sight
def run(self, edit, forward=True, extend=False, number_of_lines=1):
# Handle the special case where the view was scrolled away from the current location of
# the cursor. Trying to move the cursor will cause the view to scroll back to the current
# position of the cursor first. Bring the cursor into the current view, and then start
# moving it from there. Only do this if multiple selections are not present, just in case
# I'm trying to do something funky there.
if len(self.view.sel()) == 1 and not self.is_selection_in_sight():
visible_region = self.view.visible_region()
self.view.sel().clear()
if forward:
pos = visible_region.begin()
else:
pos = visible_region.end()
self.view.sel().add(sublime.Region(pos, pos))
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
|
<commit_before>import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def run(self, edit, forward=True, extend=False, number_of_lines=1):
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
<commit_msg>Fix annoying issue with view scrolling<commit_after>import sublime
import sublime_plugin
class MoveByLinesCommand(sublime_plugin.TextCommand):
def is_selection_in_sight(self):
sel = self.view.sel()[0]
visible_region = self.view.visible_region()
in_sight = visible_region.intersects(sel) or visible_region.contains(sel)
return in_sight
def run(self, edit, forward=True, extend=False, number_of_lines=1):
# Handle the special case where the view was scrolled away from the current location of
# the cursor. Trying to move the cursor will cause the view to scroll back to the current
# position of the cursor first. Bring the cursor into the current view, and then start
# moving it from there. Only do this if multiple selections are not present, just in case
# I'm trying to do something funky there.
if len(self.view.sel()) == 1 and not self.is_selection_in_sight():
visible_region = self.view.visible_region()
self.view.sel().clear()
if forward:
pos = visible_region.begin()
else:
pos = visible_region.end()
self.view.sel().add(sublime.Region(pos, pos))
for _ in range(number_of_lines):
self.view.run_command('move', {"by": "lines", "forward": forward, "extend": extend})
|
85e8ddb6d72b7f21b49236ea4084029dec09a6f9
|
projects/forms.py
|
projects/forms.py
|
from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
fileds = (
'status',
'attitude', )
|
from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
exclude = (
'name', 'team', 'description', 'targets', 'tasks', 'target_group',
'schedule', 'resources', 'finance_description', 'partners',
'flp', 'created_at', 'user',
)
fileds = (
'status',
'attitude', )
|
Exclude fields from the RestrcitedForm (no verification)
|
Exclude fields from the RestrcitedForm (no verification)
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
fileds = (
'status',
'attitude', )Exclude fields from the RestrcitedForm (no verification)
|
from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
exclude = (
'name', 'team', 'description', 'targets', 'tasks', 'target_group',
'schedule', 'resources', 'finance_description', 'partners',
'flp', 'created_at', 'user',
)
fileds = (
'status',
'attitude', )
|
<commit_before>from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
fileds = (
'status',
'attitude', )<commit_msg>Exclude fields from the RestrcitedForm (no verification)<commit_after>
|
from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
exclude = (
'name', 'team', 'description', 'targets', 'tasks', 'target_group',
'schedule', 'resources', 'finance_description', 'partners',
'flp', 'created_at', 'user',
)
fileds = (
'status',
'attitude', )
|
from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
fileds = (
'status',
'attitude', )Exclude fields from the RestrcitedForm (no verification)from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
exclude = (
'name', 'team', 'description', 'targets', 'tasks', 'target_group',
'schedule', 'resources', 'finance_description', 'partners',
'flp', 'created_at', 'user',
)
fileds = (
'status',
'attitude', )
|
<commit_before>from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
fileds = (
'status',
'attitude', )<commit_msg>Exclude fields from the RestrcitedForm (no verification)<commit_after>from django import forms
from .models import Project
class ProjectForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(ProjectForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
instance = super(ProjectForm, self).save(commit=False)
instance.user = self.user
instance.flp = self.user
instance.save(*args, **kwargs)
self.save_m2m()
return instance
class Meta:
model = Project
fields = (
'name',
'team',
'description',
'targets',
'tasks',
'target_group',
'schedule',
'resources',
'finance_description',
'partners',)
class RestrictedProjectForm(forms.ModelForm):
def save(self, *args, **kwargs):
instance = super(RestrictedProjectForm, self).save(commit=False)
return instance
class Meta:
model = Project
exclude = (
'name', 'team', 'description', 'targets', 'tasks', 'target_group',
'schedule', 'resources', 'finance_description', 'partners',
'flp', 'created_at', 'user',
)
fileds = (
'status',
'attitude', )
|
57f3b49f27ab0c244b30d63c9a8b5b8dd3145df6
|
app/api_v1/serializers.py
|
app/api_v1/serializers.py
|
"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
|
"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'item_id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
|
Add item_id key to BucketListItem serializer.
|
[Feature] Add item_id key to BucketListItem serializer.
|
Python
|
mit
|
andela-akiura/bucketlist
|
"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
[Feature] Add item_id key to BucketListItem serializer.
|
"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'item_id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
|
<commit_before>"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
<commit_msg>[Feature] Add item_id key to BucketListItem serializer.<commit_after>
|
"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'item_id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
|
"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
[Feature] Add item_id key to BucketListItem serializer."""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'item_id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
|
<commit_before>"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
<commit_msg>[Feature] Add item_id key to BucketListItem serializer.<commit_after>"""This module defines the format used by marshall to map the models."""
from flask_restful import fields
bucketlistitem_serializer = {
'item_id': fields.Integer,
'item_name': fields.String,
'priority': fields.String,
'done': fields.Boolean,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
bucketlist_serializer = {
'id': fields.Integer,
'list_name': fields.String,
'bucketlist_items': fields.Nested(bucketlistitem_serializer),
'created_by': fields.Integer,
'date_created': fields.DateTime,
'date_modified': fields.DateTime
}
user_serializer = {
'id': fields.Integer,
'username': fields.String,
'bucketlists': fields.Nested(bucketlist_serializer)
}
|
840643522e32484b1c44352dc095e7369a44ef7b
|
header_swap_axis.py
|
header_swap_axis.py
|
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
|
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
|
Deal with CASA's empty header keywords
|
Deal with CASA's empty header keywords
|
Python
|
mit
|
e-koch/ewky_scripts,e-koch/ewky_scripts
|
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
Deal with CASA's empty header keywords
|
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
|
<commit_before>
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
<commit_msg>Deal with CASA's empty header keywords<commit_after>
|
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
|
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
Deal with CASA's empty header keywords
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
|
<commit_before>
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
<commit_msg>Deal with CASA's empty header keywords<commit_after>
'''
Swap the axes in a header, without losing keys to WCS
'''
from astropy.wcs import WCS
def header_swapaxes(header, ax1, ax2):
'''
'''
mywcs = WCS(header)
new_hdr = mywcs.swapaxes(ax1, ax2).to_header()
lost_keys = list(set(header.keys()) - set(new_hdr.keys()))
for key in lost_keys:
# CASA sometimes gives empty keys? ""
if len(key) == 0:
continue
if str(ax1+1) in key:
new_hdr[key.replace(str(ax1+1), str(ax2+1))] = header[key]
elif str(ax2+1) in key:
new_hdr[key.replace(str(ax2+1), str(ax1+1))] = header[key]
else:
new_hdr[key] = header[key]
return new_hdr
|
1d6fa0521b0fbba48ddbc231614b7074a63488c2
|
tests/utils.py
|
tests/utils.py
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
print "importing " + abs_path
sys.path.insert(0, abs_path)
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
sys.path.insert(0, abs_path)
|
Remove debug messages from import.
|
Remove debug messages from import.
|
Python
|
mpl-2.0
|
EsriOceans/btm
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
print "importing " + abs_path
sys.path.insert(0, abs_path)
Remove debug messages from import.
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
sys.path.insert(0, abs_path)
|
<commit_before>import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
print "importing " + abs_path
sys.path.insert(0, abs_path)
<commit_msg>Remove debug messages from import.<commit_after>
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
sys.path.insert(0, abs_path)
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
print "importing " + abs_path
sys.path.insert(0, abs_path)
Remove debug messages from import.import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
sys.path.insert(0, abs_path)
|
<commit_before>import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
print "importing " + abs_path
sys.path.insert(0, abs_path)
<commit_msg>Remove debug messages from import.<commit_after>import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
sys.path.insert(0, abs_path)
|
854fe79574782f812313508bd8b207f6c033352a
|
event/models.py
|
event/models.py
|
from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
|
from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
class Meta:
ordering = ['datetime']
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
|
Add Event ordering by datetime
|
Add Event ordering by datetime
|
Python
|
mit
|
FedorSelitsky/eventrack,FedorSelitsky/eventrack,FedorSelitsky/eventrack,FedorSelitsky/eventrack
|
from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
Add Event ordering by datetime
|
from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
class Meta:
ordering = ['datetime']
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
|
<commit_before>from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
<commit_msg>Add Event ordering by datetime<commit_after>
|
from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
class Meta:
ordering = ['datetime']
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
|
from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
Add Event ordering by datetimefrom django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
class Meta:
ordering = ['datetime']
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
|
<commit_before>from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
<commit_msg>Add Event ordering by datetime<commit_after>from django.db import models
class Artist(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(blank=True)
thumb_url = models.URLField(blank=True)
events = models.ManyToManyField(
'event.Event',
related_name='artists',
blank=True,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Event(models.Model):
title = models.CharField(max_length=200)
datetime = models.DateTimeField()
venue = models.ForeignKey(
'event.Venue',
related_name='events',
on_delete=models.CASCADE,
)
class Meta:
ordering = ['datetime']
def __str__(self):
return self.title
class Venue(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
def __str__(self):
return self.name
|
aed18a3f9cbaf1eae1d7066b438437446513d912
|
sphinxcontrib/traceables/__init__.py
|
sphinxcontrib/traceables/__init__.py
|
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
|
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
|
Fix missing call to display.setup()
|
Fix missing call to display.setup()
|
Python
|
apache-2.0
|
t4ngo/sphinxcontrib-traceables
|
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
Fix missing call to display.setup()
|
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
|
<commit_before>
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
<commit_msg>Fix missing call to display.setup()<commit_after>
|
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
|
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
Fix missing call to display.setup()
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
|
<commit_before>
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
<commit_msg>Fix missing call to display.setup()<commit_after>
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
|
7eced1e5a5522febde0f4492791de25b40e110da
|
elm_format.py
|
elm_format.py
|
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, shell=True)
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
Add debug logging to elm-format
|
Add debug logging to elm-format
|
Python
|
mit
|
sekjun9878/Elm.tmLanguage,deadfoxygrandpa/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,deadfoxygrandpa/Elm.tmLanguage
|
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, shell=True)
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
Add debug logging to elm-format
|
from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
<commit_before>import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, shell=True)
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
<commit_msg>Add debug logging to elm-format<commit_after>
|
from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, shell=True)
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
Add debug logging to elm-formatfrom __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
<commit_before>import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, shell=True)
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
<commit_msg>Add debug logging to elm-format<commit_after>from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', False):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
3af265ab0740378267a3c3e9cc85bb21468bf2e0
|
engine/cli.py
|
engine/cli.py
|
from engine.event import *
from engine.action import *
from engine.code import *
from engine.player import *
from engine.round import *
from engine.team import *
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
|
from engine.action import Action, Stats
from engine.player import Player
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
|
Remove a few unnecessary imports
|
Remove a few unnecessary imports
|
Python
|
bsd-2-clause
|
mahfiaz/spotter_irl,mahfiaz/spotter_irl,mahfiaz/spotter_irl
|
from engine.event import *
from engine.action import *
from engine.code import *
from engine.player import *
from engine.round import *
from engine.team import *
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
Remove a few unnecessary imports
|
from engine.action import Action, Stats
from engine.player import Player
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
|
<commit_before>from engine.event import *
from engine.action import *
from engine.code import *
from engine.player import *
from engine.round import *
from engine.team import *
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
<commit_msg>Remove a few unnecessary imports<commit_after>
|
from engine.action import Action, Stats
from engine.player import Player
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
|
from engine.event import *
from engine.action import *
from engine.code import *
from engine.player import *
from engine.round import *
from engine.team import *
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
Remove a few unnecessary importsfrom engine.action import Action, Stats
from engine.player import Player
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
|
<commit_before>from engine.event import *
from engine.action import *
from engine.code import *
from engine.player import *
from engine.round import *
from engine.team import *
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
<commit_msg>Remove a few unnecessary imports<commit_after>from engine.action import Action, Stats
from engine.player import Player
def processInput():
userText = input("Enter command [Add player] [Team player] [Spot] [Web spot] [Flee jail] [Print] [teamChat]: \n")
if 'f' in userText:
jailCode = input("enter jail code: ")
Action.fleePlayerWithCode(jailCode)
Stats.printPlayersDetailed()
elif 's' in userText:
mobile = input("enter mobile: ")
code = input("enter code: ")
Action.handleSms(mobile, code)
Stats.printPlayersDetailed()
elif 'a' in userText:
name = input("enter name: ")
mobile = input("enter mobile: ")
#email = input("enter email: ")
Action.addPlayer(name, mobile, "")
Stats.printPlayersDetailed()
elif 'w' in userText:
hash = input("enter player hash: ")
code = input("enter code: ")
Action.handleWeb(hash, code)
Stats.printPlayersDetailed()
elif 't' in userText:
name = input("enter player name: ")
team = input("enter team name: ")
Action.addPlayerToTeam(name, team)
Stats.printPlayersDetailed()
elif 'p' in userText:
Stats.printStats()
elif 'c' in userText:
name = input("enter name: ")
message = input("enter text: ")
playerId = Player._getIdByName(name)
Action.sayToMyTeam(playerId, message)
|
49b5775f430f9d32638f074661ae877047f6dcb2
|
api/v2/serializers/summaries/image.py
|
api/v2/serializers/summaries/image.py
|
from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'start_date', 'end_date', 'user')
|
from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
tags = serializers.SerializerMethodField()
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
def get_tags(self, obj):
from api.v2.serializers.details import TagSerializer
serializer = TagSerializer(obj.tags.all(), many=True, context=self.context)
return serializer.data
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'tags', 'start_date', 'end_date', 'user')
|
Include 'tags' in the Image summary (returned by the /v2/instances detail API)
|
Include 'tags' in the Image summary (returned by the /v2/instances detail API)
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'start_date', 'end_date', 'user')
Include 'tags' in the Image summary (returned by the /v2/instances detail API)
|
from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
tags = serializers.SerializerMethodField()
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
def get_tags(self, obj):
from api.v2.serializers.details import TagSerializer
serializer = TagSerializer(obj.tags.all(), many=True, context=self.context)
return serializer.data
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'tags', 'start_date', 'end_date', 'user')
|
<commit_before>from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'start_date', 'end_date', 'user')
<commit_msg>Include 'tags' in the Image summary (returned by the /v2/instances detail API)<commit_after>
|
from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
tags = serializers.SerializerMethodField()
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
def get_tags(self, obj):
from api.v2.serializers.details import TagSerializer
serializer = TagSerializer(obj.tags.all(), many=True, context=self.context)
return serializer.data
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'tags', 'start_date', 'end_date', 'user')
|
from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'start_date', 'end_date', 'user')
Include 'tags' in the Image summary (returned by the /v2/instances detail API)from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
tags = serializers.SerializerMethodField()
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
def get_tags(self, obj):
from api.v2.serializers.details import TagSerializer
serializer = TagSerializer(obj.tags.all(), many=True, context=self.context)
return serializer.data
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'tags', 'start_date', 'end_date', 'user')
|
<commit_before>from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'start_date', 'end_date', 'user')
<commit_msg>Include 'tags' in the Image summary (returned by the /v2/instances detail API)<commit_after>from core.models import Application as Image
from rest_framework import serializers
from api.v2.serializers.fields.base import UUIDHyperlinkedIdentityField
class ImageSummarySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(
source='created_by',
read_only=True)
tags = serializers.SerializerMethodField()
url = UUIDHyperlinkedIdentityField(
view_name='api:v2:application-detail',
)
def get_tags(self, obj):
from api.v2.serializers.details import TagSerializer
serializer = TagSerializer(obj.tags.all(), many=True, context=self.context)
return serializer.data
class Meta:
model = Image
fields = ('id', 'url', 'uuid', 'name', 'description', 'icon',
'tags', 'start_date', 'end_date', 'user')
|
ac786779916e39d31582ed538635dc0aa7ee9310
|
karspexet/show/admin.py
|
karspexet/show/admin.py
|
from django.contrib import admin
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("production", "slug", "date_string")
list_filter = ("production",)
exclude = ("slug",)
ordering = ("-pk",)
|
from django.contrib import admin
from django.utils import timezone
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("date_string", "production", "venue", "visible", "is_upcoming")
list_select_related = ("production", "venue")
list_filter = ("visible", "production")
exclude = ("slug",)
ordering = ("-pk",)
@admin.display(boolean=True)
def is_upcoming(self, obj):
return obj.date > timezone.now()
|
Improve ShowAdmin to give better overview
|
Improve ShowAdmin to give better overview
|
Python
|
mit
|
Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet
|
from django.contrib import admin
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("production", "slug", "date_string")
list_filter = ("production",)
exclude = ("slug",)
ordering = ("-pk",)
Improve ShowAdmin to give better overview
|
from django.contrib import admin
from django.utils import timezone
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("date_string", "production", "venue", "visible", "is_upcoming")
list_select_related = ("production", "venue")
list_filter = ("visible", "production")
exclude = ("slug",)
ordering = ("-pk",)
@admin.display(boolean=True)
def is_upcoming(self, obj):
return obj.date > timezone.now()
|
<commit_before>from django.contrib import admin
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("production", "slug", "date_string")
list_filter = ("production",)
exclude = ("slug",)
ordering = ("-pk",)
<commit_msg>Improve ShowAdmin to give better overview<commit_after>
|
from django.contrib import admin
from django.utils import timezone
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("date_string", "production", "venue", "visible", "is_upcoming")
list_select_related = ("production", "venue")
list_filter = ("visible", "production")
exclude = ("slug",)
ordering = ("-pk",)
@admin.display(boolean=True)
def is_upcoming(self, obj):
return obj.date > timezone.now()
|
from django.contrib import admin
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("production", "slug", "date_string")
list_filter = ("production",)
exclude = ("slug",)
ordering = ("-pk",)
Improve ShowAdmin to give better overviewfrom django.contrib import admin
from django.utils import timezone
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("date_string", "production", "venue", "visible", "is_upcoming")
list_select_related = ("production", "venue")
list_filter = ("visible", "production")
exclude = ("slug",)
ordering = ("-pk",)
@admin.display(boolean=True)
def is_upcoming(self, obj):
return obj.date > timezone.now()
|
<commit_before>from django.contrib import admin
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("production", "slug", "date_string")
list_filter = ("production",)
exclude = ("slug",)
ordering = ("-pk",)
<commit_msg>Improve ShowAdmin to give better overview<commit_after>from django.contrib import admin
from django.utils import timezone
from karspexet.show.models import Production, Show
@admin.register(Production)
class ProductionAdmin(admin.ModelAdmin):
list_display = ("name", "alt_name")
@admin.register(Show)
class ShowAdmin(admin.ModelAdmin):
list_display = ("date_string", "production", "venue", "visible", "is_upcoming")
list_select_related = ("production", "venue")
list_filter = ("visible", "production")
exclude = ("slug",)
ordering = ("-pk",)
@admin.display(boolean=True)
def is_upcoming(self, obj):
return obj.date > timezone.now()
|
62bbc01940e85e6017b4b5d4e757437b05c81f71
|
evaluation_system/reports/views.py
|
evaluation_system/reports/views.py
|
from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
|
from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user, created_by = request.user.id)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
|
Fix evaluation query on report
|
Fix evaluation query on report
|
Python
|
mit
|
carlosa54/evaluation_system,carlosa54/evaluation_system,carlosa54/evaluation_system
|
from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
Fix evaluation query on report
|
from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user, created_by = request.user.id)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
|
<commit_before>from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
<commit_msg>Fix evaluation query on report<commit_after>
|
from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user, created_by = request.user.id)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
|
from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
Fix evaluation query on reportfrom django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user, created_by = request.user.id)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
|
<commit_before>from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
<commit_msg>Fix evaluation query on report<commit_after>from django.shortcuts import render
from django.views.generic import TemplateView
from django.shortcuts import redirect
from ..evaluation.models import Evaluation, Group_User
class showProfessorReport(TemplateView):
template_name= "report/professorReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "professor":
return redirect("/")
context = self.get_context_data(**kwargs)
context["evaluations"] = Evaluation.objects.filter(course__professor = request.user, created_by = request.user.id)
return self.render_to_response(context)
class showStudentReport(TemplateView):
template_name= "report/studentReport.html"
def get(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect("/login")
if not request.user.type == "student":
return redirect("/")
context = self.get_context_data(**kwargs)
group = Group_User.objects.filter(student = request.user)
context["group"] = group
if not group:
context['error'] = "You're not assigned to any courses"
return self.render_to_response(context)
|
44791b285f4c30cbafc93abcce525f52d21e8215
|
Lib/test/test_dbm.py
|
Lib/test/test_dbm.py
|
#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename= '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
except:
pass
|
#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
if dbm.library == "ndbm":
# classic dbm
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
elif dbm.library == "BSD db":
# BSD DB's compatibility layer
os.unlink(filename + '.db')
else:
# GNU gdbm compatibility layer
os.unlink(filename)
except:
pass
|
Fix up the cleanup of the temporary DB so it works for BSD DB's compatibility layer as well as "classic" ndbm.
|
Fix up the cleanup of the temporary DB so it works for BSD DB's
compatibility layer as well as "classic" ndbm.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename= '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
except:
pass
Fix up the cleanup of the temporary DB so it works for BSD DB's
compatibility layer as well as "classic" ndbm.
|
#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
if dbm.library == "ndbm":
# classic dbm
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
elif dbm.library == "BSD db":
# BSD DB's compatibility layer
os.unlink(filename + '.db')
else:
# GNU gdbm compatibility layer
os.unlink(filename)
except:
pass
|
<commit_before>#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename= '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
except:
pass
<commit_msg>Fix up the cleanup of the temporary DB so it works for BSD DB's
compatibility layer as well as "classic" ndbm.<commit_after>
|
#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
if dbm.library == "ndbm":
# classic dbm
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
elif dbm.library == "BSD db":
# BSD DB's compatibility layer
os.unlink(filename + '.db')
else:
# GNU gdbm compatibility layer
os.unlink(filename)
except:
pass
|
#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename= '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
except:
pass
Fix up the cleanup of the temporary DB so it works for BSD DB's
compatibility layer as well as "classic" ndbm.#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
if dbm.library == "ndbm":
# classic dbm
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
elif dbm.library == "BSD db":
# BSD DB's compatibility layer
os.unlink(filename + '.db')
else:
# GNU gdbm compatibility layer
os.unlink(filename)
except:
pass
|
<commit_before>#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename= '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
except:
pass
<commit_msg>Fix up the cleanup of the temporary DB so it works for BSD DB's
compatibility layer as well as "classic" ndbm.<commit_after>#! /usr/bin/env python
"""Test script for the dbm module
Roger E. Masse
"""
import dbm
from dbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'rw')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
try:
import os
if dbm.library == "ndbm":
# classic dbm
os.unlink(filename + '.dir')
os.unlink(filename + '.pag')
elif dbm.library == "BSD db":
# BSD DB's compatibility layer
os.unlink(filename + '.db')
else:
# GNU gdbm compatibility layer
os.unlink(filename)
except:
pass
|
871b2fb4b49f10305ac4817856e0873283c67d08
|
reactlibapp/reactlibapp/settings/development.py
|
reactlibapp/reactlibapp/settings/development.py
|
import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
|
import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
|
Remove redundant space on new line
|
Remove redundant space on new line
|
Python
|
mit
|
andela-sjames/Django-ReactJS-Library-App,andela-sjames/Django-ReactJS-Library-App,andela-sjames/Django-ReactJS-Library-App,andela-sjames/Django-ReactJS-Library-App
|
import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
Remove redundant space on new line
|
import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
|
<commit_before>import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
<commit_msg>Remove redundant space on new line<commit_after>
|
import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
|
import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
Remove redundant space on new lineimport os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
|
<commit_before>import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
<commit_msg>Remove redundant space on new line<commit_after>import os, sys
# Production specific settings
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
'PORT': os.environ.get('DB_PORT'),
'TEST': {
'CHARSET': None,
'COLLATION': None,
'NAME': os.path.join(os.path.dirname(__file__), 'test.db'),
'MIRROR': None
}
},
}
|
24998a6ca73f29c5380d875cf9b8da69b8d1e8f0
|
erpnext/patches/v4_2/repost_reserved_qty.py
|
erpnext/patches/v4_2/repost_reserved_qty.py
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
frappe.db.sql("""delete from tabBin
where exists(
select name from tabItem where name=tabBin.item_code and ifnull(is_stock_item, 0) = 0
)
""")
|
Delete Bin for non-stock item
|
[fix][patch] Delete Bin for non-stock item
|
Python
|
agpl-3.0
|
hanselke/erpnext-1,mahabuber/erpnext,anandpdoshi/erpnext,fuhongliang/erpnext,geekroot/erpnext,mbauskar/omnitech-demo-erpnext,meisterkleister/erpnext,hatwar/buyback-erpnext,njmube/erpnext,gangadharkadam/v6_erp,SPKian/Testing,meisterkleister/erpnext,shft117/SteckerApp,SPKian/Testing2,gangadhar-kadam/helpdesk-erpnext,Aptitudetech/ERPNext,indictranstech/osmosis-erpnext,mbauskar/omnitech-erpnext,gangadharkadam/saloon_erp,Tejal011089/huntercamp_erpnext,shft117/SteckerApp,mbauskar/sapphire-erpnext,njmube/erpnext,susuchina/ERPNEXT,gsnbng/erpnext,pombredanne/erpnext,gangadharkadam/saloon_erp,aruizramon/alec_erpnext,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,mbauskar/sapphire-erpnext,mbauskar/omnitech-demo-erpnext,mbauskar/helpdesk-erpnext,mbauskar/omnitech-demo-erpnext,njmube/erpnext,aruizramon/alec_erpnext,gangadharkadam/contributionerp,hatwar/buyback-erpnext,indictranstech/biggift-erpnext,mbauskar/helpdesk-erpnext,sheafferusa/erpnext,indictranstech/biggift-erpnext,mbauskar/alec_frappe5_erpnext,njmube/erpnext,pombredanne/erpnext,gangadharkadam/v6_erp,sagar30051991/ozsmart-erp,Tejal011089/huntercamp_erpnext,indictranstech/reciphergroup-erpnext,indictranstech/osmosis-erpnext,susuchina/ERPNEXT,mahabuber/erpnext,sagar30051991/ozsmart-erp,SPKian/Testing,mbauskar/alec_frappe5_erpnext,mbauskar/omnitech-erpnext,sheafferusa/erpnext,mbauskar/helpdesk-erpnext,sheafferusa/erpnext,mbauskar/omnitech-erpnext,gangadharkadam/saloon_erp_install,SPKian/Testing,gangadharkadam/saloon_erp,aruizramon/alec_erpnext,susuchina/ERPNEXT,mbauskar/omnitech-erpnext,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/saloon_erp_install,indictranstech/biggift-erpnext,indictranstech/reciphergroup-erpnext,hernad/erpnext,sagar30051991/ozsmart-erp,gmarke/erpnext,mahabuber/erpnext,hatwar/buyback-erpnext,gsnbng/erpnext,anandpdoshi/erpnext,MartinEnder/erpnext-de,MartinEnder/erpnext-de,SPKian/Testing2,anandpdoshi/erpnext,gmarke/erpnext,MartinEnder/erpnext-de,SPKian/Testing2,gsnbng/erpnext,mbauskar/helpdesk-erpnext,
geekroot/erpnext,SPKian/Testing,hanselke/erpnext-1,mbauskar/alec_frappe5_erpnext,mbauskar/sapphire-erpnext,hernad/erpnext,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/saloon_erp,indictranstech/reciphergroup-erpnext,fuhongliang/erpnext,gangadharkadam/contributionerp,hatwar/buyback-erpnext,indictranstech/trufil-erpnext,fuhongliang/erpnext,ShashaQin/erpnext,hanselke/erpnext-1,gmarke/erpnext,gsnbng/erpnext,hernad/erpnext,indictranstech/erpnext,pombredanne/erpnext,ShashaQin/erpnext,gangadharkadam/saloon_erp_install,gangadharkadam/contributionerp,indictranstech/trufil-erpnext,indictranstech/trufil-erpnext,aruizramon/alec_erpnext,fuhongliang/erpnext,Tejal011089/huntercamp_erpnext,indictranstech/osmosis-erpnext,indictranstech/osmosis-erpnext,indictranstech/reciphergroup-erpnext,pombredanne/erpnext,gangadharkadam/contributionerp,gangadharkadam/v6_erp,indictranstech/erpnext,gmarke/erpnext,anandpdoshi/erpnext,hernad/erpnext,mbauskar/sapphire-erpnext,sheafferusa/erpnext,hanselke/erpnext-1,gangadhar-kadam/helpdesk-erpnext,geekroot/erpnext,indictranstech/erpnext,sagar30051991/ozsmart-erp,mbauskar/alec_frappe5_erpnext,meisterkleister/erpnext,mahabuber/erpnext,gangadharkadam/v6_erp,susuchina/ERPNEXT,shft117/SteckerApp,ShashaQin/erpnext,meisterkleister/erpnext,shft117/SteckerApp,indictranstech/erpnext,indictranstech/trufil-erpnext,MartinEnder/erpnext-de,gangadharkadam/saloon_erp_install,indictranstech/biggift-erpnext,geekroot/erpnext,Tejal011089/huntercamp_erpnext,ShashaQin/erpnext
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})[fix][patch] Delete Bin for non-stock item
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
frappe.db.sql("""delete from tabBin
where exists(
select name from tabItem where name=tabBin.item_code and ifnull(is_stock_item, 0) = 0
)
""")
|
<commit_before># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})<commit_msg>[fix][patch] Delete Bin for non-stock item<commit_after>
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
frappe.db.sql("""delete from tabBin
where exists(
select name from tabItem where name=tabBin.item_code and ifnull(is_stock_item, 0) = 0
)
""")
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})[fix][patch] Delete Bin for non-stock item# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
frappe.db.sql("""delete from tabBin
where exists(
select name from tabItem where name=tabBin.item_code and ifnull(is_stock_item, 0) = 0
)
""")
|
<commit_before># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})<commit_msg>[fix][patch] Delete Bin for non-stock item<commit_after># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.utilities.repost_stock import update_bin_qty, get_reserved_qty
def execute():
repost_for = frappe.db.sql("""
select
distinct item_code, warehouse
from
(
(
select distinct item_code, warehouse
from `tabSales Order Item` where docstatus=1
) UNION (
select distinct item_code, warehouse
from `tabPacked Item` where docstatus=1 and parenttype='Sales Order'
)
) so_item
where
exists(select name from tabItem where name=so_item.item_code and ifnull(is_stock_item, 0)=1)
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
frappe.db.sql("""delete from tabBin
where exists(
select name from tabItem where name=tabBin.item_code and ifnull(is_stock_item, 0) = 0
)
""")
|
aaaaac53d996ff5ed1f39cbed583079e26150443
|
falcom/api/hathi/from_json.py
|
falcom/api/hathi/from_json.py
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
def load_json (json_data):
try:
return json.loads(json_data)
except:
return { }
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
Add named constant to explain why { } default
|
Add named constant to explain why { } default
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
def load_json (json_data):
try:
return json.loads(json_data)
except:
return { }
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
Add named constant to explain why { } default
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
<commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
def load_json (json_data):
try:
return json.loads(json_data)
except:
return { }
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
<commit_msg>Add named constant to explain why { } default<commit_after>
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
def load_json (json_data):
try:
return json.loads(json_data)
except:
return { }
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
Add named constant to explain why { } default# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
<commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
def load_json (json_data):
try:
return json.loads(json_data)
except:
return { }
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
<commit_msg>Add named constant to explain why { } default<commit_after># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
import json
from .data import HathiData
EMPTY_JSON_DATA = { }
def load_json (json_data):
try:
return json.loads(json_data)
except:
return EMPTY_JSON_DATA
def get_None_if_empty (container):
return container if container else None
def title_lists_in_data (data):
return (x.get("titles", ()) for x in data.get("records", {}).values())
def get_titles_from_data (data):
result = [ ]
for title_list in title_lists_in_data(data):
result.extend(title_list)
return get_None_if_empty(result)
def htids_in_data (data):
return [x["htid"] for x in data.get("items", []) if "htid" in x]
def get_htids_from_data (data):
return get_None_if_empty(htids_in_data(data))
def get_hathi_data_from_json (json_data = ""):
data = load_json(json_data)
return HathiData(titles=get_titles_from_data(data),
htids=get_htids_from_data(data))
|
2cd1df93ec3e93fb5f787be5160a50e9f295211f
|
examples/plot_estimate_covariance_matrix.py
|
examples/plot_estimate_covariance_matrix.py
|
"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Set up pick list: MEG + STI 014 - bad channels
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
|
"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Compute the covariance from the raw data
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
|
FIX : fix text in cov estimation example
|
FIX : fix text in cov estimation example
|
Python
|
bsd-3-clause
|
matthew-tucker/mne-python,cjayb/mne-python,wmvanvliet/mne-python,bloyl/mne-python,dgwakeman/mne-python,lorenzo-desantis/mne-python,drammock/mne-python,andyh616/mne-python,mne-tools/mne-python,aestrivex/mne-python,jaeilepp/mne-python,agramfort/mne-python,teonlamont/mne-python,kingjr/mne-python,yousrabk/mne-python,pravsripad/mne-python,kambysese/mne-python,larsoner/mne-python,kingjr/mne-python,cmoutard/mne-python,larsoner/mne-python,jmontoyam/mne-python,agramfort/mne-python,Odingod/mne-python,kingjr/mne-python,antiface/mne-python,aestrivex/mne-python,cmoutard/mne-python,trachelr/mne-python,ARudiuk/mne-python,olafhauk/mne-python,rkmaddox/mne-python,matthew-tucker/mne-python,alexandrebarachant/mne-python,jmontoyam/mne-python,larsoner/mne-python,antiface/mne-python,alexandrebarachant/mne-python,bloyl/mne-python,dgwakeman/mne-python,trachelr/mne-python,olafhauk/mne-python,ARudiuk/mne-python,olafhauk/mne-python,teonlamont/mne-python,leggitta/mne-python,Teekuningas/mne-python,wronk/mne-python,nicproulx/mne-python,adykstra/mne-python,pravsripad/mne-python,leggitta/mne-python,yousrabk/mne-python,rkmaddox/mne-python,effigies/mne-python,drammock/mne-python,mne-tools/mne-python,wmvanvliet/mne-python,Teekuningas/mne-python,drammock/mne-python,cjayb/mne-python,Eric89GXL/mne-python,wmvanvliet/mne-python,mne-tools/mne-python,dimkal/mne-python,effigies/mne-python,jniediek/mne-python,andyh616/mne-python,nicproulx/mne-python,dimkal/mne-python,Odingod/mne-python,pravsripad/mne-python,jniediek/mne-python,jaeilepp/mne-python,Teekuningas/mne-python,kambysese/mne-python,wronk/mne-python,Eric89GXL/mne-python,lorenzo-desantis/mne-python,adykstra/mne-python
|
"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Set up pick list: MEG + STI 014 - bad channels
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
FIX : fix text in cov estimation example
|
"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Compute the covariance from the raw data
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
|
<commit_before>"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Set up pick list: MEG + STI 014 - bad channels
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
<commit_msg>FIX : fix text in cov estimation example<commit_after>
|
"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Compute the covariance from the raw data
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
|
"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Set up pick list: MEG + STI 014 - bad channels
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
FIX : fix text in cov estimation example"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Compute the covariance from the raw data
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
|
<commit_before>"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Set up pick list: MEG + STI 014 - bad channels
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
<commit_msg>FIX : fix text in cov estimation example<commit_after>"""
==============================================
Estimate covariance matrix from a raw FIF file
==============================================
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
print __doc__
import mne
from mne import fiff
from mne.datasets import sample
data_path = sample.data_path('.')
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
raw = fiff.Raw(fname)
# Compute the covariance from the raw data
cov = mne.compute_raw_data_covariance(raw, reject=dict(eeg=80e-6, eog=150e-6))
print cov
###############################################################################
# Show covariance
import pylab as pl
pl.figure()
pl.imshow(cov.data, interpolation="nearest")
pl.title('Full covariance matrix')
pl.show()
|
98ca748996fe462cedf284ad91a74bdd30eb81f3
|
mopidy/__init__.py
|
mopidy/__init__.py
|
from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
Use print function instead of print statement
|
py3: Use print function instead of print statement
|
Python
|
apache-2.0
|
jcass77/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,vrs01/mopidy,jcass77/mopidy,diandiankan/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,rawdlite/mopidy,jcass77/mopidy,jmarsik/mopidy,mopidy/mopidy,bencevans/mopidy,mopidy/mopidy,diandiankan/mopidy,jmarsik/mopidy,vrs01/mopidy,mokieyue/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ali/mopidy,ali/mopidy,tkem/mopidy,hkariti/mopidy,glogiotatidis/mopidy,quartz55/mopidy,kingosticks/mopidy,rawdlite/mopidy,bencevans/mopidy,quartz55/mopidy,bencevans/mopidy,swak/mopidy,rawdlite/mopidy,dbrgn/mopidy,bacontext/mopidy,jodal/mopidy,ZenithDK/mopidy,diandiankan/mopidy,priestd09/mopidy,hkariti/mopidy,kingosticks/mopidy,adamcik/mopidy,jodal/mopidy,pacificIT/mopidy,quartz55/mopidy,mopidy/mopidy,swak/mopidy,priestd09/mopidy,ali/mopidy,pacificIT/mopidy,adamcik/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,bacontext/mopidy,pacificIT/mopidy,pacificIT/mopidy,bacontext/mopidy,tkem/mopidy,hkariti/mopidy,swak/mopidy,mokieyue/mopidy,ZenithDK/mopidy,bacontext/mopidy,rawdlite/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,jmarsik/mopidy,swak/mopidy,diandiankan/mopidy,priestd09/mopidy,SuperStarPL/mopidy,vrs01/mopidy,quartz55/mopidy,adamcik/mopidy,glogiotatidis/mopidy,jodal/mopidy,tkem/mopidy,jmarsik/mopidy,dbrgn/mopidy,hkariti/mopidy,vrs01/mopidy,bencevans/mopidy,tkem/mopidy,ali/mopidy
|
from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
py3: Use print function instead of print statement
|
from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
<commit_before>from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
<commit_msg>py3: Use print function instead of print statement<commit_after>
|
from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
py3: Use print function instead of print statementfrom __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
<commit_before>from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
<commit_msg>py3: Use print function instead of print statement<commit_after>from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
77af150756021ac4027e290b5d538e0525d812b9
|
mopidy/settings.py
|
mopidy/settings.py
|
CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
|
CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s [%(threadName)s] %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
|
Add threadName to log format
|
Add threadName to log format
|
Python
|
apache-2.0
|
bencevans/mopidy,pacificIT/mopidy,quartz55/mopidy,SuperStarPL/mopidy,priestd09/mopidy,mokieyue/mopidy,abarisain/mopidy,hkariti/mopidy,swak/mopidy,adamcik/mopidy,quartz55/mopidy,priestd09/mopidy,pacificIT/mopidy,dbrgn/mopidy,jmarsik/mopidy,bencevans/mopidy,tkem/mopidy,abarisain/mopidy,liamw9534/mopidy,hkariti/mopidy,pacificIT/mopidy,jcass77/mopidy,diandiankan/mopidy,pacificIT/mopidy,tkem/mopidy,dbrgn/mopidy,mopidy/mopidy,jodal/mopidy,rawdlite/mopidy,vrs01/mopidy,tkem/mopidy,woutervanwijk/mopidy,ali/mopidy,hkariti/mopidy,vrs01/mopidy,woutervanwijk/mopidy,ali/mopidy,hkariti/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,ZenithDK/mopidy,jmarsik/mopidy,dbrgn/mopidy,dbrgn/mopidy,kingosticks/mopidy,diandiankan/mopidy,kingosticks/mopidy,ZenithDK/mopidy,mopidy/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,jodal/mopidy,kingosticks/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,bencevans/mopidy,jmarsik/mopidy,swak/mopidy,swak/mopidy,rawdlite/mopidy,vrs01/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,bencevans/mopidy,mopidy/mopidy,tkem/mopidy,ali/mopidy,ZenithDK/mopidy,adamcik/mopidy,liamw9534/mopidy,jcass77/mopidy,jcass77/mopidy,jmarsik/mopidy,adamcik/mopidy,rawdlite/mopidy,ali/mopidy,ZenithDK/mopidy,quartz55/mopidy,jodal/mopidy,swak/mopidy,vrs01/mopidy,rawdlite/mopidy,bacontext/mopidy,bacontext/mopidy,diandiankan/mopidy,bacontext/mopidy,quartz55/mopidy,bacontext/mopidy,priestd09/mopidy,diandiankan/mopidy
|
CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
Add threadName to log format
|
CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s [%(threadName)s] %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
|
<commit_before>CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
<commit_msg>Add threadName to log format<commit_after>
|
CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s [%(threadName)s] %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
|
CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
Add threadName to log formatCONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s [%(threadName)s] %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
|
<commit_before>CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
<commit_msg>Add threadName to log format<commit_after>CONSOLE_LOG_FORMAT = u'%(levelname)-8s %(asctime)s [%(threadName)s] %(name)s\n %(message)s'
MPD_LINE_ENCODING = u'utf-8'
MPD_LINE_TERMINATOR = u'\n'
MPD_SERVER_HOSTNAME = u'localhost'
MPD_SERVER_PORT = 6600
SPOTIFY_USERNAME = u''
SPOTIFY_PASSWORD = u''
try:
from mopidy.local_settings import *
except ImportError:
pass
|
e91d81a03d57af1fff1b580b1c276fd02f44f587
|
places/migrations/0011_auto_20200712_1733.py
|
places/migrations/0011_auto_20200712_1733.py
|
# Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('places', '0010_auto_20200712_0505'),
]
operations = [
migrations.AlterModelOptions(
name='category',
options={'ordering': ['name'], 'verbose_name_plural': 'categories'},
),
migrations.AlterModelOptions(
name='review',
options={'ordering': ['rating']},
),
]
|
# Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("places", "0010_auto_20200712_0505"),
]
operations = [
migrations.AlterModelOptions(
name="category",
options={"ordering": ["name"], "verbose_name_plural": "categories"},
),
migrations.AlterModelOptions(name="review", options={"ordering": ["rating"]},),
]
|
Apply black formatting to migration
|
Apply black formatting to migration
|
Python
|
mit
|
huangsam/chowist,huangsam/chowist,huangsam/chowist
|
# Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('places', '0010_auto_20200712_0505'),
]
operations = [
migrations.AlterModelOptions(
name='category',
options={'ordering': ['name'], 'verbose_name_plural': 'categories'},
),
migrations.AlterModelOptions(
name='review',
options={'ordering': ['rating']},
),
]
Apply black formatting to migration
|
# Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("places", "0010_auto_20200712_0505"),
]
operations = [
migrations.AlterModelOptions(
name="category",
options={"ordering": ["name"], "verbose_name_plural": "categories"},
),
migrations.AlterModelOptions(name="review", options={"ordering": ["rating"]},),
]
|
<commit_before># Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('places', '0010_auto_20200712_0505'),
]
operations = [
migrations.AlterModelOptions(
name='category',
options={'ordering': ['name'], 'verbose_name_plural': 'categories'},
),
migrations.AlterModelOptions(
name='review',
options={'ordering': ['rating']},
),
]
<commit_msg>Apply black formatting to migration<commit_after>
|
# Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("places", "0010_auto_20200712_0505"),
]
operations = [
migrations.AlterModelOptions(
name="category",
options={"ordering": ["name"], "verbose_name_plural": "categories"},
),
migrations.AlterModelOptions(name="review", options={"ordering": ["rating"]},),
]
|
# Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('places', '0010_auto_20200712_0505'),
]
operations = [
migrations.AlterModelOptions(
name='category',
options={'ordering': ['name'], 'verbose_name_plural': 'categories'},
),
migrations.AlterModelOptions(
name='review',
options={'ordering': ['rating']},
),
]
Apply black formatting to migration# Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("places", "0010_auto_20200712_0505"),
]
operations = [
migrations.AlterModelOptions(
name="category",
options={"ordering": ["name"], "verbose_name_plural": "categories"},
),
migrations.AlterModelOptions(name="review", options={"ordering": ["rating"]},),
]
|
<commit_before># Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('places', '0010_auto_20200712_0505'),
]
operations = [
migrations.AlterModelOptions(
name='category',
options={'ordering': ['name'], 'verbose_name_plural': 'categories'},
),
migrations.AlterModelOptions(
name='review',
options={'ordering': ['rating']},
),
]
<commit_msg>Apply black formatting to migration<commit_after># Generated by Django 3.0.8 on 2020-07-12 17:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("places", "0010_auto_20200712_0505"),
]
operations = [
migrations.AlterModelOptions(
name="category",
options={"ordering": ["name"], "verbose_name_plural": "categories"},
),
migrations.AlterModelOptions(name="review", options={"ordering": ["rating"]},),
]
|
4b716882b3e8e13e591d629a88e5b102c7f008b4
|
mapit/management/commands/mapit_generation_deactivate.py
|
mapit/management/commands/mapit_generation_deactivate.py
|
# This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
|
# This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
|
Add the import for CommandError
|
Add the import for CommandError
|
Python
|
agpl-3.0
|
opencorato/mapit,chris48s/mapit,New-Bamboo/mapit,Code4SA/mapit,Code4SA/mapit,chris48s/mapit,New-Bamboo/mapit,Code4SA/mapit,Sinar/mapit,opencorato/mapit,opencorato/mapit,Sinar/mapit,chris48s/mapit
|
# This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
Add the import for CommandError
|
# This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
|
<commit_before># This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
<commit_msg>Add the import for CommandError<commit_after>
|
# This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
|
# This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
Add the import for CommandError# This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
|
<commit_before># This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
<commit_msg>Add the import for CommandError<commit_after># This script deactivates a particular generation
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
|
f98b78fcf37e9d3e200c468b5a0bba25abdd13fd
|
django_lti_tool_provider/tests/urls.py
|
django_lti_tool_provider/tests/urls.py
|
from django.conf.urls import url
from django.contrib.auth.views import login
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', login),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
|
from django.conf.urls import url
from django.contrib.auth.views import LoginView
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', LoginView.as_view()),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
|
Replace contrib.auth's "login" view with LoginView.
|
Replace contrib.auth's "login" view with LoginView.
Cf. https://docs.djangoproject.com/en/2.1/releases/1.11/#id2
contrib.auth's login() and logout() function-based views are deprecated in favor of new class-based views LoginView and LogoutView.
|
Python
|
agpl-3.0
|
open-craft/django-lti-tool-provider
|
from django.conf.urls import url
from django.contrib.auth.views import login
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', login),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
Replace contrib.auth's "login" view with LoginView.
Cf. https://docs.djangoproject.com/en/2.1/releases/1.11/#id2
contrib.auth's login() and logout() function-based views are deprecated in favor of new class-based views LoginView and LogoutView.
|
from django.conf.urls import url
from django.contrib.auth.views import LoginView
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', LoginView.as_view()),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
|
<commit_before>from django.conf.urls import url
from django.contrib.auth.views import login
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', login),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
<commit_msg>Replace contrib.auth's "login" view with LoginView.
Cf. https://docs.djangoproject.com/en/2.1/releases/1.11/#id2
contrib.auth's login() and logout() function-based views are deprecated in favor of new class-based views LoginView and LogoutView.<commit_after>
|
from django.conf.urls import url
from django.contrib.auth.views import LoginView
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', LoginView.as_view()),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
|
from django.conf.urls import url
from django.contrib.auth.views import login
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', login),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
Replace contrib.auth's "login" view with LoginView.
Cf. https://docs.djangoproject.com/en/2.1/releases/1.11/#id2
contrib.auth's login() and logout() function-based views are deprecated in favor of new class-based views LoginView and LogoutView.from django.conf.urls import url
from django.contrib.auth.views import LoginView
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', LoginView.as_view()),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
|
<commit_before>from django.conf.urls import url
from django.contrib.auth.views import login
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', login),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
<commit_msg>Replace contrib.auth's "login" view with LoginView.
Cf. https://docs.djangoproject.com/en/2.1/releases/1.11/#id2
contrib.auth's login() and logout() function-based views are deprecated in favor of new class-based views LoginView and LogoutView.<commit_after>from django.conf.urls import url
from django.contrib.auth.views import LoginView
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='home'),
url('^accounts/login/$', LoginView.as_view()),
url(r'^lti$', lti_views.LTIView.as_view(), name='lti')
]
|
2b4c4a61b7b4853f93c7ac1272905660fce8c3fd
|
aurorawatchuk/snapshot.py
|
aurorawatchuk/snapshot.py
|
from aurorawatchuk import AuroraWatchUK
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
|
import aurorawatchuk
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', aurorawatchuk.AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
|
Rewrite import to avoid accidental misuse
|
Rewrite import to avoid accidental misuse
Don't import the AuroraWatchUK class into the snapshot namespace, it
enables the original AuroraWatchUK class to be used when it was intended
to use the snapshot version, AuroraWatchUK_SS.
|
Python
|
mit
|
stevemarple/python-aurorawatchuk
|
from aurorawatchuk import AuroraWatchUK
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
Rewrite import to avoid accidental misuse
Don't import the AuroraWatchUK class into the snapshot namespace, it
enables the original AuroraWatchUK class to be used when it was intended
to use the snapshot version, AuroraWatchUK_SS.
|
import aurorawatchuk
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', aurorawatchuk.AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
|
<commit_before>from aurorawatchuk import AuroraWatchUK
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
<commit_msg>Rewrite import to avoid accidental misuse
Don't import the AuroraWatchUK class into the snapshot namespace, it
enables the original AuroraWatchUK class to be used when it was intended
to use the snapshot version, AuroraWatchUK_SS.<commit_after>
|
import aurorawatchuk
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', aurorawatchuk.AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
|
from aurorawatchuk import AuroraWatchUK
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
Rewrite import to avoid accidental misuse
Don't import the AuroraWatchUK class into the snapshot namespace, it
enables the original AuroraWatchUK class to be used when it was intended
to use the snapshot version, AuroraWatchUK_SS.import aurorawatchuk
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', aurorawatchuk.AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
|
<commit_before>from aurorawatchuk import AuroraWatchUK
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
<commit_msg>Rewrite import to avoid accidental misuse
Don't import the AuroraWatchUK class into the snapshot namespace, it
enables the original AuroraWatchUK class to be used when it was intended
to use the snapshot version, AuroraWatchUK_SS.<commit_after>import aurorawatchuk
__author__ = 'Steve Marple'
__version__ = '0.1.2'
__license__ = 'MIT'
class AuroraWatchUK_SS(object):
"""Take a snapshot of the AuroraWatch UK status.
This class mimics the behaviour of the :class:`.aurorawatchuk.AuroraWatchUK` class but its fields are evaluated
just once and cached, at the time first requested. Thus the values it returns are snapshots of the ``status``,
``activity`` and ``description`` fields. This is useful when the information may be required multiple times as
it avoids the possibility that the value could change between uses. If the information is not required then
no network traffic is generated.
For documentation see :class:`.aurorawatchuk.AuroraWatchUK`."""
def __init__(self, *args, **kwargs):
object.__setattr__(self, '_awuk', aurorawatchuk.AuroraWatchUK(*args, **kwargs))
object.__setattr__(self, '_fields', {})
def __getattr__(self, item):
if item[0] != '_':
# Cache this item
if item not in self._fields:
self._fields[item] = getattr(self._awuk, item)
return self._fields[item]
def __setattr__(self, key, value):
if key[0] == '_':
raise AttributeError
else:
return object.__setattr__(self, key, value)
def __delattr__(self, item):
if item[0] == '_':
raise AttributeError
else:
return object.__delattr__(self, item)
|
e1f49afe5d4aeae2306349d52df4295944598dc1
|
thinglang/parser/tokens/logic.py
|
thinglang/parser/tokens/logic.py
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class UnconditionalElse(BaseToken):
pass
class ConditionalElse(Conditional):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class ElseBranchInterface(object):
pass
class UnconditionalElse(BaseToken, ElseBranchInterface):
pass
class ConditionalElse(Conditional, ElseBranchInterface):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
Update interface signatures for else branches
|
Update interface signatures for else branches
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class UnconditionalElse(BaseToken):
pass
class ConditionalElse(Conditional):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
Update interface signatures for else branches
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class ElseBranchInterface(object):
pass
class UnconditionalElse(BaseToken, ElseBranchInterface):
pass
class ConditionalElse(Conditional, ElseBranchInterface):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
<commit_before>from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class UnconditionalElse(BaseToken):
pass
class ConditionalElse(Conditional):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
<commit_msg>Update interface signatures for else branches<commit_after>
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class ElseBranchInterface(object):
pass
class UnconditionalElse(BaseToken, ElseBranchInterface):
pass
class ConditionalElse(Conditional, ElseBranchInterface):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class UnconditionalElse(BaseToken):
pass
class ConditionalElse(Conditional):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
Update interface signatures for else branchesfrom thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class ElseBranchInterface(object):
pass
class UnconditionalElse(BaseToken, ElseBranchInterface):
pass
class ConditionalElse(Conditional, ElseBranchInterface):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
<commit_before>from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class UnconditionalElse(BaseToken):
pass
class ConditionalElse(Conditional):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
<commit_msg>Update interface signatures for else branches<commit_after>from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class ElseBranchInterface(object):
pass
class UnconditionalElse(BaseToken, ElseBranchInterface):
pass
class ConditionalElse(Conditional, ElseBranchInterface):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
efdfcccf57b294d529039095c2c71401546b3519
|
elephas/utils/functional_utils.py
|
elephas/utils/functional_utils.py
|
from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
|
from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def subtract_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x-y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
|
Subtract two sets of parameters
|
Subtract two sets of parameters
|
Python
|
mit
|
FighterLYL/elephas,maxpumperla/elephas,CheMcCandless/elephas,daishichao/elephas,maxpumperla/elephas,aarzhaev/elephas,darcy0511/elephas
|
from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
Subtract two sets of parameters
|
from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def subtract_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x-y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
|
<commit_before>from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
<commit_msg>Subtract two sets of parameters<commit_after>
|
from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def subtract_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x-y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
|
from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
Subtract two sets of parametersfrom __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def subtract_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x-y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
|
<commit_before>from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
<commit_msg>Subtract two sets of parameters<commit_after>from __future__ import absolute_import
import numpy as np
def add_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x+y)
return res
def subtract_params(p1, p2):
res = []
for x,y in zip(p1,p2):
res.append(x-y)
return res
def get_neutral(array):
res = []
for x in array:
res.append(np.zeros_like(x))
return res
def divide_by(array_list, num_workers):
for i in xrange(len(array_list)):
array_list[i] /= num_workers
return array_list
|
634cfafd7470c40c574f315c3302158ea3232bc9
|
example/achillesexample/blocks.py
|
example/achillesexample/blocks.py
|
from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column()
last_name = tables.Column()
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='miau')
model = Person
|
from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column(verbose_name='First name')
last_name = tables.Column(verbose_name='First name')
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='Miauify')
model = Person
|
Use verbose names in example table
|
Use verbose names in example table
|
Python
|
apache-2.0
|
exekias/django-achilles,exekias/django-achilles
|
from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column()
last_name = tables.Column()
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='miau')
model = Person
Use verbose names in example table
|
from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column(verbose_name='First name')
last_name = tables.Column(verbose_name='First name')
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='Miauify')
model = Person
|
<commit_before>from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column()
last_name = tables.Column()
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='miau')
model = Person
<commit_msg>Use verbose names in example table<commit_after>
|
from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column(verbose_name='First name')
last_name = tables.Column(verbose_name='First name')
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='Miauify')
model = Person
|
from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column()
last_name = tables.Column()
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='miau')
model = Person
Use verbose names in example tablefrom achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column(verbose_name='First name')
last_name = tables.Column(verbose_name='First name')
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='Miauify')
model = Person
|
<commit_before>from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column()
last_name = tables.Column()
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='miau')
model = Person
<commit_msg>Use verbose names in example table<commit_after>from achilles import blocks, tables
from time import sleep
from models import Person
register = blocks.Library('example')
COUNTER = 0
@register.block(template_name='blocks/message.html')
def counter():
global COUNTER
COUNTER += 1
return {
'message': 'Block loaded %s times' % COUNTER,
}
@register.block(template_name='blocks/message.html')
def slow():
sleep(1)
return {
'message':'This block was loaded after page was loaded!',
}
@register.block('mytable')
class Table(tables.Table):
first_name = tables.Column(verbose_name='First name')
last_name = tables.Column(verbose_name='First name')
call_example = tables.ActionColumn(action='example:miau_person',
verbose_name='Miauify')
model = Person
|
21889635640e0ca5e63fb7351b745e29b8748515
|
labmanager/utils.py
|
labmanager/utils.py
|
import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(value)
def to_url(self, value):
return url_quote(value, self.map.charset, safe='')
|
import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(url_unquote(url_unquote(value)))
def to_url(self, value):
return url_quote(url_quote(url_quote(value, self.map.charset, safe=''), self.map.charset, safe=''), self.map.charset, safe='')
|
Fix issue with URL routes
|
Fix issue with URL routes
|
Python
|
bsd-2-clause
|
gateway4labs/labmanager,gateway4labs/labmanager,labsland/labmanager,labsland/labmanager,morelab/labmanager,go-lab/labmanager,morelab/labmanager,porduna/labmanager,morelab/labmanager,go-lab/labmanager,go-lab/labmanager,porduna/labmanager,morelab/labmanager,porduna/labmanager,labsland/labmanager,labsland/labmanager,porduna/labmanager,go-lab/labmanager,gateway4labs/labmanager,gateway4labs/labmanager
|
import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(value)
def to_url(self, value):
return url_quote(value, self.map.charset, safe='')
Fix issue with URL routes
|
import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(url_unquote(url_unquote(value)))
def to_url(self, value):
return url_quote(url_quote(url_quote(value, self.map.charset, safe=''), self.map.charset, safe=''), self.map.charset, safe='')
|
<commit_before>import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(value)
def to_url(self, value):
return url_quote(value, self.map.charset, safe='')
<commit_msg>Fix issue with URL routes<commit_after>
|
import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(url_unquote(url_unquote(value)))
def to_url(self, value):
return url_quote(url_quote(url_quote(value, self.map.charset, safe=''), self.map.charset, safe=''), self.map.charset, safe='')
|
import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(value)
def to_url(self, value):
return url_quote(value, self.map.charset, safe='')
Fix issue with URL routesimport os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(url_unquote(url_unquote(value)))
def to_url(self, value):
return url_quote(url_quote(url_quote(value, self.map.charset, safe=''), self.map.charset, safe=''), self.map.charset, safe='')
|
<commit_before>import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(value)
def to_url(self, value):
return url_quote(value, self.map.charset, safe='')
<commit_msg>Fix issue with URL routes<commit_after>import os
import sys
from werkzeug.urls import url_quote, url_unquote
from werkzeug.routing import PathConverter
def data_filename(fname):
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if os.path.exists(os.path.join(basedir, 'labmanager_data', fname)):
return os.path.join(basedir, 'labmanager_data', fname)
if os.path.exists(os.path.join(sys.prefix, 'labmanager_data', fname)):
return os.path.join(sys.prefix, 'labmanager_data', fname)
elif os.path.exists(os.path.join(basedir, fname)):
return os.path.abspath(os.path.join(basedir, fname))
else:
return fname
class FullyQuotedUrlConverter(PathConverter):
def to_python(self, value):
return url_unquote(url_unquote(url_unquote(value)))
def to_url(self, value):
return url_quote(url_quote(url_quote(value, self.map.charset, safe=''), self.map.charset, safe=''), self.map.charset, safe='')
|
dac003dc60034cf3dce6829f90ccec30593a34b2
|
ingestors/worker.py
|
ingestors/worker.py
|
import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None:
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
|
import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None or not len(entities):
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
|
Switch to a mutation timestamp
|
Switch to a mutation timestamp
|
Python
|
mit
|
alephdata/ingestors
|
import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None:
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
Switch to a mutation timestamp
|
import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None or not len(entities):
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
|
<commit_before>import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None:
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
<commit_msg>Switch to a mutation timestamp<commit_after>
|
import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None or not len(entities):
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
|
import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None:
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
Switch to a mutation timestampimport logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None or not len(entities):
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
|
<commit_before>import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None:
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
<commit_msg>Switch to a mutation timestamp<commit_after>import logging
from followthemoney import model
from servicelayer.worker import Worker
from ingestors.manager import Manager
log = logging.getLogger(__name__)
class IngestWorker(Worker):
"""A long running task runner that uses Redis as a task queue"""
def dispatch_next(self, task, entities):
next_stage = task.context.get('next_stage')
if next_stage is None or not len(entities):
return
stage = task.job.get_stage(next_stage)
log.info("Sending %s entities to: %s", len(entities), next_stage)
stage.queue({'entity_ids': entities}, task.context)
def handle(self, task):
manager = Manager(task.stage, task.context)
entity = model.get_proxy(task.payload)
log.debug("Ingest: %r", entity)
manager.ingest_entity(entity)
manager.close()
self.dispatch_next(task, manager.emitted)
|
48f9b32bfe8a222cbe8afdb1e4f0d63bc2ac9a68
|
nova/conf/cache.py
|
nova/conf/cache.py
|
# needs:fix_opt_description
# needs:check_deprecation_status
# needs:check_opt_group_and_type
# needs:fix_opt_description_indentation
# needs:fix_opt_registration_consistency
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
|
Update tags for Cache config option
|
Update tags for Cache config option
Updated tags for config options consistency [1].
[1] https://wiki.openstack.org/wiki/ConfigOptionsConsistency
Change-Id: I3f82d2b4d60028221bc861bfe0fe5dff6efd971f
Implements: Blueprint centralize-config-options-newton
|
Python
|
apache-2.0
|
vmturbo/nova,cloudbase/nova,hanlind/nova,sebrandon1/nova,Juniper/nova,jianghuaw/nova,rajalokan/nova,rahulunair/nova,alaski/nova,Juniper/nova,mahak/nova,Juniper/nova,mikalstill/nova,jianghuaw/nova,rahulunair/nova,sebrandon1/nova,gooddata/openstack-nova,alaski/nova,klmitch/nova,openstack/nova,Juniper/nova,klmitch/nova,rajalokan/nova,rajalokan/nova,hanlind/nova,vmturbo/nova,phenoxim/nova,cloudbase/nova,hanlind/nova,vmturbo/nova,mahak/nova,klmitch/nova,gooddata/openstack-nova,mahak/nova,gooddata/openstack-nova,vmturbo/nova,cloudbase/nova,jianghuaw/nova,sebrandon1/nova,rahulunair/nova,mikalstill/nova,rajalokan/nova,openstack/nova,phenoxim/nova,openstack/nova,klmitch/nova,jianghuaw/nova,mikalstill/nova,gooddata/openstack-nova
|
# needs:fix_opt_description
# needs:check_deprecation_status
# needs:check_opt_group_and_type
# needs:fix_opt_description_indentation
# needs:fix_opt_registration_consistency
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
Update tags for Cache config option
Updated tags for config options consistency [1].
[1] https://wiki.openstack.org/wiki/ConfigOptionsConsistency
Change-Id: I3f82d2b4d60028221bc861bfe0fe5dff6efd971f
Implements: Blueprint centralize-config-options-newton
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
|
<commit_before># needs:fix_opt_description
# needs:check_deprecation_status
# needs:check_opt_group_and_type
# needs:fix_opt_description_indentation
# needs:fix_opt_registration_consistency
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
<commit_msg>Update tags for Cache config option
Updated tags for config options consistency [1].
[1] https://wiki.openstack.org/wiki/ConfigOptionsConsistency
Change-Id: I3f82d2b4d60028221bc861bfe0fe5dff6efd971f
Implements: Blueprint centralize-config-options-newton<commit_after>
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
|
# needs:fix_opt_description
# needs:check_deprecation_status
# needs:check_opt_group_and_type
# needs:fix_opt_description_indentation
# needs:fix_opt_registration_consistency
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
Update tags for Cache config option
Updated tags for config options consistency [1].
[1] https://wiki.openstack.org/wiki/ConfigOptionsConsistency
Change-Id: I3f82d2b4d60028221bc861bfe0fe5dff6efd971f
Implements: Blueprint centralize-config-options-newton# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
|
<commit_before># needs:fix_opt_description
# needs:check_deprecation_status
# needs:check_opt_group_and_type
# needs:fix_opt_description_indentation
# needs:fix_opt_registration_consistency
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
<commit_msg>Update tags for Cache config option
Updated tags for config options consistency [1].
[1] https://wiki.openstack.org/wiki/ConfigOptionsConsistency
Change-Id: I3f82d2b4d60028221bc861bfe0fe5dff6efd971f
Implements: Blueprint centralize-config-options-newton<commit_after># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import core
def register_opts(conf):
core.configure(conf)
def list_opts():
return core._opts.list_opts()
|
f5e7751835764a819678f58be0098cd7a62cb691
|
core/admin/mailu/internal/__init__.py
|
core/admin/mailu/internal/__init__.py
|
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
|
from flask_limiter import RateLimitExceeded
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@internal.app_errorhandler(RateLimitExceeded)
def rate_limit_handler(e):
response = flask.Response()
response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded'
response.headers['Auth-Error-Code'] = '451 4.3.2'
if int(flask.request.headers['Auth-Login-Attempt']) < 10:
response.headers['Auth-Wait'] = '3'
return response
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
|
Return correct status codes from auth rate limiter failure.
|
Return correct status codes from auth rate limiter failure.
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
Return correct status codes from auth rate limiter failure.
|
from flask_limiter import RateLimitExceeded
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@internal.app_errorhandler(RateLimitExceeded)
def rate_limit_handler(e):
response = flask.Response()
response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded'
response.headers['Auth-Error-Code'] = '451 4.3.2'
if int(flask.request.headers['Auth-Login-Attempt']) < 10:
response.headers['Auth-Wait'] = '3'
return response
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
|
<commit_before>from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
<commit_msg>Return correct status codes from auth rate limiter failure.<commit_after>
|
from flask_limiter import RateLimitExceeded
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@internal.app_errorhandler(RateLimitExceeded)
def rate_limit_handler(e):
response = flask.Response()
response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded'
response.headers['Auth-Error-Code'] = '451 4.3.2'
if int(flask.request.headers['Auth-Login-Attempt']) < 10:
response.headers['Auth-Wait'] = '3'
return response
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
|
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
Return correct status codes from auth rate limiter failure.from flask_limiter import RateLimitExceeded
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@internal.app_errorhandler(RateLimitExceeded)
def rate_limit_handler(e):
response = flask.Response()
response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded'
response.headers['Auth-Error-Code'] = '451 4.3.2'
if int(flask.request.headers['Auth-Login-Attempt']) < 10:
response.headers['Auth-Wait'] = '3'
return response
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
|
<commit_before>from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
<commit_msg>Return correct status codes from auth rate limiter failure.<commit_after>from flask_limiter import RateLimitExceeded
from mailu import limiter
import socket
import flask
internal = flask.Blueprint('internal', __name__)
@internal.app_errorhandler(RateLimitExceeded)
def rate_limit_handler(e):
response = flask.Response()
response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded'
response.headers['Auth-Error-Code'] = '451 4.3.2'
if int(flask.request.headers['Auth-Login-Attempt']) < 10:
response.headers['Auth-Wait'] = '3'
return response
@limiter.request_filter
def whitelist_webmail():
try:
return flask.request.headers["Client-Ip"] ==\
socket.gethostbyname("webmail")
except:
return False
from mailu.internal import views
|
3c0b2806627347aeda52e19b77d84042deb16824
|
swfc_lt_stream/net.py
|
swfc_lt_stream/net.py
|
import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = *struct.unpack('!II', payload[:8]), payload[8:]
return type_, payload
|
import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = struct.unpack('!II%ds' % (len(payload) - 8), payload)
return type_, payload
|
Remove starred expression for 3.4 compatibility
|
Remove starred expression for 3.4 compatibility
|
Python
|
mit
|
anikey-m/swfc-lt-stream,anikey-m/swfc-lt-stream
|
import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = *struct.unpack('!II', payload[:8]), payload[8:]
return type_, payload
Remove starred expression for 3.4 compatibility
|
import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = struct.unpack('!II%ds' % (len(payload) - 8), payload)
return type_, payload
|
<commit_before>import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = *struct.unpack('!II', payload[:8]), payload[8:]
return type_, payload
<commit_msg>Remove starred expression for 3.4 compatibility<commit_after>
|
import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = struct.unpack('!II%ds' % (len(payload) - 8), payload)
return type_, payload
|
import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = *struct.unpack('!II', payload[:8]), payload[8:]
return type_, payload
Remove starred expression for 3.4 compatibilityimport enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = struct.unpack('!II%ds' % (len(payload) - 8), payload)
return type_, payload
|
<commit_before>import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = *struct.unpack('!II', payload[:8]), payload[8:]
return type_, payload
<commit_msg>Remove starred expression for 3.4 compatibility<commit_after>import enum
import functools
import operator
import struct
class Packet(enum.IntEnum):
connect = 0
disconnect = 1
data = 2
ack = 3
end = 4
def build_data_packet(window, blockseed, block):
payload = struct.pack('!II', window, blockseed) + block
return build_packet(Packet.data, payload)
def build_shift_packet(window_num):
payload = struct.pack('!I', window_num)
return build_packet(Packet.ack, payload)
def build_packet(type_, payload):
crc = functools.reduce(operator.xor, payload, type_)
packet = struct.pack('!B%dsB' % len(payload), type_, payload, crc)
return packet
def clean_packet(packet):
type_, payload, pack_crc = struct.unpack('!B%dsB' % (len(packet)-2), packet)
crc = functools.reduce(operator.xor, payload, type_)
if crc != pack_crc:
raise ValueError('Invalid packet check sum.')
if type_ == Packet.ack:
payload = struct.unpack('!I', payload)
elif type_ == Packet.data:
payload = struct.unpack('!II%ds' % (len(payload) - 8), payload)
return type_, payload
|
347faf7f550253bb076accbb1c4ecaba9d906324
|
talks/events/forms.py
|
talks/events/forms.py
|
from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.CharField(widget=forms.HiddenInput)
select_enabled = forms.CharField(widget=forms.HiddenInput)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
|
from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.BooleanField(required=False)
select_enabled = forms.BooleanField(required=False)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
def clean(self):
cleaned_data = super(EventGroupForm, self).clean()
if 'enabled' in cleaned_data:
if 'form_enabled' in cleaned_data:
return cleaned_data
elif 'select_enabled' in cleaned_data:
self.errors['title'] = None
self.errors['description'] = None
if not cleaned_data.get('event_group_select', None):
self.add_error('event_group_select', "Select an Event Group")
return cleaned_data
else:
return {}
def get_event_group(self):
# Form has been completed and user has selected an event group
valid = self.is_valid()
if 'enabled' in self.cleaned_data:
# Creating a new EventGroup
if valid and 'form_enabled' in self.cleaned_data:
return self.save()
elif 'select_enabled' in self.cleaned_data and 'event_group_select' in self.cleaned_data:
return self.cleaned_data['event_group_select']
return None
def is_enabled(self):
self.is_valid()
return 'enabled' in self.cleaned_data
|
Validate between the Select and create new EventGroup
|
Validate between the Select and create new EventGroup
Added simple booleans to manage this selection for now
|
Python
|
apache-2.0
|
ox-it/talks.ox,ox-it/talks.ox,ox-it/talks.ox
|
from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.CharField(widget=forms.HiddenInput)
select_enabled = forms.CharField(widget=forms.HiddenInput)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
Validate between the Select and create new EventGroup
Added simple booleans to manage this selection for now
|
from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.BooleanField(required=False)
select_enabled = forms.BooleanField(required=False)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
def clean(self):
cleaned_data = super(EventGroupForm, self).clean()
if 'enabled' in cleaned_data:
if 'form_enabled' in cleaned_data:
return cleaned_data
elif 'select_enabled' in cleaned_data:
self.errors['title'] = None
self.errors['description'] = None
if not cleaned_data.get('event_group_select', None):
self.add_error('event_group_select', "Select an Event Group")
return cleaned_data
else:
return {}
def get_event_group(self):
# Form has been completed and user has selected an event group
valid = self.is_valid()
if 'enabled' in self.cleaned_data:
# Creating a new EventGroup
if valid and 'form_enabled' in self.cleaned_data:
return self.save()
elif 'select_enabled' in self.cleaned_data and 'event_group_select' in self.cleaned_data:
return self.cleaned_data['event_group_select']
return None
def is_enabled(self):
self.is_valid()
return 'enabled' in self.cleaned_data
|
<commit_before>from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.CharField(widget=forms.HiddenInput)
select_enabled = forms.CharField(widget=forms.HiddenInput)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
<commit_msg>Validate between the Select and create new EventGroup
Added simple booleans to manage this selection for now<commit_after>
|
from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.BooleanField(required=False)
select_enabled = forms.BooleanField(required=False)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
def clean(self):
cleaned_data = super(EventGroupForm, self).clean()
if 'enabled' in cleaned_data:
if 'form_enabled' in cleaned_data:
return cleaned_data
elif 'select_enabled' in cleaned_data:
self.errors['title'] = None
self.errors['description'] = None
if not cleaned_data.get('event_group_select', None):
self.add_error('event_group_select', "Select an Event Group")
return cleaned_data
else:
return {}
def get_event_group(self):
# Form has been completed and user has selected an event group
valid = self.is_valid()
if 'enabled' in self.cleaned_data:
# Creating a new EventGroup
if valid and 'form_enabled' in self.cleaned_data:
return self.save()
elif 'select_enabled' in self.cleaned_data and 'event_group_select' in self.cleaned_data:
return self.cleaned_data['event_group_select']
return None
def is_enabled(self):
self.is_valid()
return 'enabled' in self.cleaned_data
|
from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.CharField(widget=forms.HiddenInput)
select_enabled = forms.CharField(widget=forms.HiddenInput)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
Validate between the Select and create new EventGroup
Added simple booleans to manage this selection for nowfrom django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.BooleanField(required=False)
select_enabled = forms.BooleanField(required=False)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
def clean(self):
cleaned_data = super(EventGroupForm, self).clean()
if 'enabled' in cleaned_data:
if 'form_enabled' in cleaned_data:
return cleaned_data
elif 'select_enabled' in cleaned_data:
self.errors['title'] = None
self.errors['description'] = None
if not cleaned_data.get('event_group_select', None):
self.add_error('event_group_select', "Select an Event Group")
return cleaned_data
else:
return {}
def get_event_group(self):
# Form has been completed and user has selected an event group
valid = self.is_valid()
if 'enabled' in self.cleaned_data:
# Creating a new EventGroup
if valid and 'form_enabled' in self.cleaned_data:
return self.save()
elif 'select_enabled' in self.cleaned_data and 'event_group_select' in self.cleaned_data:
return self.cleaned_data['event_group_select']
return None
def is_enabled(self):
self.is_valid()
return 'enabled' in self.cleaned_data
|
<commit_before>from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.CharField(widget=forms.HiddenInput)
select_enabled = forms.CharField(widget=forms.HiddenInput)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
<commit_msg>Validate between the Select and create new EventGroup
Added simple booleans to manage this selection for now<commit_after>from django import forms
from .models import Event, EventGroup
class EventForm(forms.ModelForm):
class Meta:
fields = ('title', 'description', 'speakers', 'location', 'start', 'end')
model = Event
labels = {
'description': 'Abstract',
'speakers': 'Speaker',
'location': 'Venue',
}
widgets = {
'speakers': forms.TextInput,
'location': forms.TextInput,
}
class EventGroupForm(forms.ModelForm):
enabled = forms.BooleanField(label='Add to a group?')
form_enabled = forms.BooleanField(required=False)
select_enabled = forms.BooleanField(required=False)
event_group_select = forms.ModelChoiceField(
queryset=EventGroup.objects.all(),
required=False,
label="Existing group")
class Meta:
fields = ('form_enabled', 'select_enabled', 'event_group_select', 'title', 'description')
model = EventGroup
def clean(self):
cleaned_data = super(EventGroupForm, self).clean()
if 'enabled' in cleaned_data:
if 'form_enabled' in cleaned_data:
return cleaned_data
elif 'select_enabled' in cleaned_data:
self.errors['title'] = None
self.errors['description'] = None
if not cleaned_data.get('event_group_select', None):
self.add_error('event_group_select', "Select an Event Group")
return cleaned_data
else:
return {}
def get_event_group(self):
# Form has been completed and user has selected an event group
valid = self.is_valid()
if 'enabled' in self.cleaned_data:
# Creating a new EventGroup
if valid and 'form_enabled' in self.cleaned_data:
return self.save()
elif 'select_enabled' in self.cleaned_data and 'event_group_select' in self.cleaned_data:
return self.cleaned_data['event_group_select']
return None
def is_enabled(self):
self.is_valid()
return 'enabled' in self.cleaned_data
|
643ea571b795ed933afac13e38e1aee9f5fec4b6
|
openminted/Ab3P.py
|
openminted/Ab3P.py
|
#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
|
#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
parser.add_argument('--param:language',required=False,type=str,help='Ignored language parameter')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
|
Add unused param:language flag for OpenMinTeD purposes
|
Add unused param:language flag for OpenMinTeD purposes
|
Python
|
mit
|
jakelever/pubrunner,jakelever/pubrunner
|
#!/usr/bin/env python
"""OpenMinTeD Docker entry point: run the Ab3P tool over a corpus via pubrunner."""
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys

def main():
    """Parse the OpenMinTeD CLI arguments and delegate to pubrunner's CLI."""
    arg_parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
    arg_parser.add_argument('--input',required=True,type=str,help='Input directory')
    arg_parser.add_argument('--output',required=True,type=str,help='Output directory')
    parsed = arg_parser.parse_args()

    assert os.path.isdir(parsed.input)
    assert os.path.isdir(parsed.output)

    # pubrunner is driven through its own command line: rebuild sys.argv
    # with the options we need, then invoke its entry point.
    sys.argv = [
        'pubrunner',
        '--defaultsettings',
        '--forceresource_dir', parsed.input,
        '--forceresource_format', 'uimaxmi',
        '--outputdir', parsed.output,
        'https://github.com/jakelever/Ab3P',
    ]
    pubrunner.command_line.main()

if __name__ == '__main__':
    main()
Add unused param:language flag for OpenMinTeD purposes
|
#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
parser.add_argument('--param:language',required=False,type=str,help='Ignored language parameter')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
|
<commit_before>#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
<commit_msg>Add unused param:language flag for OpenMinTeD purposes<commit_after>
|
#!/usr/bin/env python
"""OpenMinTeD Docker entry point: run the Ab3P tool over a corpus via pubrunner."""
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
    parser.add_argument('--input',required=True,type=str,help='Input directory')
    parser.add_argument('--output',required=True,type=str,help='Output directory')
    # OpenMinTeD always passes a language flag; accept and ignore it.
    # dest= is needed because 'param:language' is not a valid Python
    # identifier, so argparse could not otherwise expose it as an attribute.
    parser.add_argument('--param:language',dest='param_language',required=False,type=str,help='Ignored language parameter')
    args = parser.parse_args()

    # parser.error() reports bad paths with a usage message and exit code 2,
    # and still runs under `python -O` (an `assert` would be stripped).
    if not os.path.isdir(args.input):
        parser.error('--input is not a directory: %s' % args.input)
    if not os.path.isdir(args.output):
        parser.error('--output is not a directory: %s' % args.output)

    inputFormat = 'uimaxmi'
    githubRepo = 'https://github.com/jakelever/Ab3P'

    # pubrunner is driven through its own command line: rebuild sys.argv
    # with the options we need, then invoke its entry point.
    sys.argv = ['pubrunner']
    sys.argv += ['--defaultsettings']
    sys.argv += ['--forceresource_dir', args.input]
    sys.argv += ['--forceresource_format', inputFormat]
    sys.argv += ['--outputdir', args.output]
    sys.argv += [githubRepo]
    pubrunner.command_line.main()
|
#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
Add unused param:language flag for OpenMinTeD purposes#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
parser.add_argument('--param:language',required=False,type=str,help='Ignored language parameter')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
|
<commit_before>#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
<commit_msg>Add unused param:language flag for OpenMinTeD purposes<commit_after>#!/usr/bin/env python
import argparse
import pubrunner
import pubrunner.command_line
import os
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Main access point for OpenMinTeD Docker component')
parser.add_argument('--input',required=True,type=str,help='Input directory')
parser.add_argument('--output',required=True,type=str,help='Output directory')
parser.add_argument('--param:language',required=False,type=str,help='Ignored language parameter')
args = parser.parse_args()
assert os.path.isdir(args.input)
assert os.path.isdir(args.output)
inputFormat = 'uimaxmi'
githubRepo = 'https://github.com/jakelever/Ab3P'
sys.argv = ['pubrunner']
sys.argv += ['--defaultsettings']
sys.argv += ['--forceresource_dir', args.input]
sys.argv += ['--forceresource_format', inputFormat]
sys.argv += ['--outputdir', args.output]
sys.argv += [githubRepo]
pubrunner.command_line.main()
|
7c97bbe3e25f7cc8953fd286a0736ede09f97dcf
|
paper/replicate.py
|
paper/replicate.py
|
import os
# Best Python command on your system
my_python = "python"
print("This script should download and install DNest4 and \
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
os.system("tar xvzf 0.1.3.tar.gz")
os.system("mv DNest4-0.1.3 DNest4")
os.chdir("DNest4/code")
os.system("make")
os.chdir("../python")
os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
|
import os
import matplotlib.pyplot
# Best Python command on your system
my_python = "/home/brendon/local/anaconda3/bin/python"
print("This script should\
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already as per the instructions in the paper.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
#os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
#os.system("tar xvzf 0.1.3.tar.gz")
#os.system("mv DNest4-0.1.3 DNest4")
#os.chdir("DNest4/code")
#os.system("make")
#os.chdir("../python")
#os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
|
Use existing code (don't clone again)
|
Use existing code (don't clone again)
|
Python
|
mit
|
eggplantbren/DNest4,eggplantbren/DNest4,eggplantbren/DNest4,eggplantbren/DNest4,eggplantbren/DNest4
|
import os
# Best Python command on your system
my_python = "python"
print("This script should download and install DNest4 and \
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
os.system("tar xvzf 0.1.3.tar.gz")
os.system("mv DNest4-0.1.3 DNest4")
os.chdir("DNest4/code")
os.system("make")
os.chdir("../python")
os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
Use existing code (don't clone again)
|
import os
import matplotlib.pyplot
# Best Python command on your system
my_python = "/home/brendon/local/anaconda3/bin/python"
print("This script should\
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already as per the instructions in the paper.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
#os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
#os.system("tar xvzf 0.1.3.tar.gz")
#os.system("mv DNest4-0.1.3 DNest4")
#os.chdir("DNest4/code")
#os.system("make")
#os.chdir("../python")
#os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
|
<commit_before>import os
# Best Python command on your system
my_python = "python"
print("This script should download and install DNest4 and \
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
os.system("tar xvzf 0.1.3.tar.gz")
os.system("mv DNest4-0.1.3 DNest4")
os.chdir("DNest4/code")
os.system("make")
os.chdir("../python")
os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
<commit_msg>Use existing code (don't clone again)<commit_after>
|
import os
import matplotlib.pyplot  # NOTE(review): unused here -- possibly imported to fail fast if matplotlib is missing; confirm before removing
# Best Python command on your system
my_python = "/home/brendon/local/anaconda3/bin/python"  # hard-coded interpreter used to run each example's showresults.py
print("This script should\
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already as per the instructions in the paper.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()  # pause so the user can read the notice above before runs start
# Download/build steps deliberately disabled: the paper's instructions assume
# DNest4 is already compiled in place, so the existing code is reused.
#os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
#os.system("tar xvzf 0.1.3.tar.gz")
#os.system("mv DNest4-0.1.3 DNest4")
#os.chdir("DNest4/code")
#os.system("make")
#os.chdir("../python")
#os.system("python setup.py install")
def run_example(directory):
    """Run one DNest4 example: ./main with a fixed seed, then its results script.

    Changes into `directory`, runs the compiled example (`-s 0` fixes the RNG
    seed for reproducibility), renders its plots via showresults.py, and
    returns to the parent directory.
    """
    os.chdir(directory)
    os.system("./main -s 0")
    os.system(my_python + " showresults.py")
    os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")  # fairly time-consuming (see notice above)
run_example("ABC")
|
import os
# Best Python command on your system
my_python = "python"
print("This script should download and install DNest4 and \
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
os.system("tar xvzf 0.1.3.tar.gz")
os.system("mv DNest4-0.1.3 DNest4")
os.chdir("DNest4/code")
os.system("make")
os.chdir("../python")
os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
Use existing code (don't clone again)import os
import matplotlib.pyplot
# Best Python command on your system
my_python = "/home/brendon/local/anaconda3/bin/python"
print("This script should\
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already as per the instructions in the paper.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
#os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
#os.system("tar xvzf 0.1.3.tar.gz")
#os.system("mv DNest4-0.1.3 DNest4")
#os.chdir("DNest4/code")
#os.system("make")
#os.chdir("../python")
#os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
|
<commit_before>import os
# Best Python command on your system
my_python = "python"
print("This script should download and install DNest4 and \
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
os.system("tar xvzf 0.1.3.tar.gz")
os.system("mv DNest4-0.1.3 DNest4")
os.chdir("DNest4/code")
os.system("make")
os.chdir("../python")
os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
<commit_msg>Use existing code (don't clone again)<commit_after>import os
import matplotlib.pyplot
# Best Python command on your system
my_python = "/home/brendon/local/anaconda3/bin/python"
print("This script should\
replicate all the runs presented in the paper.\nNote:\
plots will be generated, which need to be manually\n\
closed for the script to continue. It assumes\n\
everything has been compiled already as per the instructions in the paper.\n\
\n\
NOTE: the RJObject_1DMixture example is fairly time-consuming.\n\n\
Press ENTER to begin.")
junk = input()
#os.system("wget https://github.com/eggplantbren/DNest4/archive/0.1.3.tar.gz")
#os.system("tar xvzf 0.1.3.tar.gz")
#os.system("mv DNest4-0.1.3 DNest4")
#os.chdir("DNest4/code")
#os.system("make")
#os.chdir("../python")
#os.system("python setup.py install")
def run_example(directory):
os.chdir(directory)
os.system("./main -s 0")
os.system(my_python + " showresults.py")
os.chdir("..")
os.chdir("../code/Examples")
run_example("StraightLine")
run_example("RJObject_1DMixture")
run_example("ABC")
|
6a095a8a140b8056c5a17467d3249c1ab9bba8f4
|
grammpy/IsMethodsRuleExtension.py
|
grammpy/IsMethodsRuleExtension.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return False
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
    """Rule extension adding grammar-class predicate stubs.

    Every predicate currently returns False; they exist to fix the API
    surface and are presumably meant to be implemented or overridden later.
    """
    @classmethod
    def is_regular(cls):
        """Return True if the rule fits a regular grammar (stub: always False)."""
        return False
    @classmethod
    def is_contextfree(cls):
        """Return True if the rule fits a context-free grammar (stub: always False)."""
        return False
    @classmethod
    def is_context(cls):
        """Return True if the rule fits a context-sensitive grammar (stub: always False)."""
        return False
    @classmethod
    def is_unrestricted(cls):
        """Return True if the rule fits an unrestricted grammar (stub: always False)."""
        return False
    @classmethod
    def is_valid(cls):
        """Return True if the rule is well-formed (stub: always False)."""
        return False
|
Add header of Rule.isValid method
|
Add header of Rule.isValid method
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return FalseAdd header of Rule.isValid method
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return False
@classmethod
def is_valid(cls):
return False
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return False<commit_msg>Add header of Rule.isValid method<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return False
@classmethod
def is_valid(cls):
return False
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return FalseAdd header of Rule.isValid method#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return False
@classmethod
def is_valid(cls):
return False
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return False<commit_msg>Add header of Rule.isValid method<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Rule import Rule
class IsMethodsRuleExtension(Rule):
@classmethod
def is_regular(cls):
return False
@classmethod
def is_contextfree(cls):
return False
@classmethod
def is_context(cls):
return False
@classmethod
def is_unrestricted(cls):
return False
@classmethod
def is_valid(cls):
return False
|
d9324be744dd19720b1c31c520f7189ffffbccd9
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+)'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
    """Provides an interface to reek."""
    # View syntaxes (scope names) this linter activates for.
    syntax = (
        'better rspec',
        'betterruby',
        'cucumber steps',
        'rspec',
        'ruby experimental',
        'ruby on rails',
        'ruby'
    )
    # `ruby -S` resolves the reek executable on PATH via RubyGems.
    cmd = 'ruby -S reek'
    # NOTE(review): the trailing ` \[.*\]` deliberately excludes reek's
    # bracketed wiki link from the captured message -- assumes output of
    # the form "... [<line> ...]:<message> [<wiki url>]"; verify against
    # the reek version in `version_requirement`.
    regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+) \[.*\]'
    tempfile_suffix = 'rb'  # lint a temp copy carrying a .rb extension
    version_args = '-S reek -v'
    version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
    version_requirement = '>= 3.5.0'
    # Pass a project-local config.reek to reek via -c when one exists.
    config_file = ('-c', 'config.reek')
|
Remove the wiki link from the smell message
|
Remove the wiki link from the smell message
|
Python
|
mit
|
codequest-eu/SublimeLinter-contrib-reek
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+)'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
Remove the wiki link from the smell message
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+) \[.*\]'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+)'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
<commit_msg>Remove the wiki link from the smell message<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+) \[.*\]'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+)'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
Remove the wiki link from the smell message#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+) \[.*\]'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+)'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
<commit_msg>Remove the wiki link from the smell message<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bartosz Kruszczynski
# Copyright (c) 2015 Bartosz Kruszczynski
#
# License: MIT
#
"""This module exports the Reek plugin class."""
from SublimeLinter.lint import RubyLinter
class Reek(RubyLinter):
"""Provides an interface to reek."""
syntax = (
'better rspec',
'betterruby',
'cucumber steps',
'rspec',
'ruby experimental',
'ruby on rails',
'ruby'
)
cmd = 'ruby -S reek'
regex = r'^.+?\[(?P<line>\d+).*\]:(?P<message>.+) \[.*\]'
tempfile_suffix = 'rb'
version_args = '-S reek -v'
version_re = r'reek\s(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.5.0'
config_file = ('-c', 'config.reek')
|
aff606998eccb328a48323f79d26d6c96ad4900a
|
doc/examples/plot_piecewise_affine.py
|
doc/examples/plot_piecewise_affine.py
|
"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 20)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
output_shape = (image.shape[0] - 1.5 * 50, image.shape[1])
out = warp(image, tform, output_shape=output_shape)
plt.imshow(out)
plt.show()
|
"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 10)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
out_rows = image.shape[0] - 1.5 * 50
out_cols = cols
out = warp(image, tform, output_shape=(out_rows, out_cols))
plt.imshow(out)
plt.plot(tform.inverse(src)[:, 0], tform.inverse(src)[:, 1], '.b')
plt.axis((0, out_cols, out_rows, 0))
plt.show()
|
Add mesh points to plot
|
Add mesh points to plot
|
Python
|
bsd-3-clause
|
almarklein/scikit-image,paalge/scikit-image,chintak/scikit-image,keflavich/scikit-image,emon10005/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,rjeli/scikit-image,pratapvardhan/scikit-image,ajaybhat/scikit-image,almarklein/scikit-image,GaZ3ll3/scikit-image,jwiggins/scikit-image,blink1073/scikit-image,robintw/scikit-image,almarklein/scikit-image,chriscrosscutler/scikit-image,bennlich/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,Midafi/scikit-image,pratapvardhan/scikit-image,Midafi/scikit-image,youprofit/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,bsipocz/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,ofgulban/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,emon10005/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,bsipocz/scikit-image,juliusbierk/scikit-image,newville/scikit-image,Britefury/scikit-image,robintw/scikit-image,keflavich/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,ajaybhat/scikit-image,almarklein/scikit-image,michaelaye/scikit-image,Hiyorimi/scikit-image,oew1v07/scikit-image,paalge/scikit-image,rjeli/scikit-image,michaelaye/scikit-image,warmspringwinds/scikit-image,dpshelio/scikit-image,warmspringwinds/scikit-image,newville/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,SamHames/scikit-image,rjeli/scikit-image,juliusbierk/scikit-image,bennlich/scikit-image,chintak/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,jwiggins/scikit-image,dpshelio/scikit-image,blink1073/scikit-image,WarrenWeckesser/scikits-image,SamHames/scikit-image,ClinicalGraphics/scikit-image
|
"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 20)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
output_shape = (image.shape[0] - 1.5 * 50, image.shape[1])
out = warp(image, tform, output_shape=output_shape)
plt.imshow(out)
plt.show()
Add mesh points to plot
|
"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 10)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
out_rows = image.shape[0] - 1.5 * 50
out_cols = cols
out = warp(image, tform, output_shape=(out_rows, out_cols))
plt.imshow(out)
plt.plot(tform.inverse(src)[:, 0], tform.inverse(src)[:, 1], '.b')
plt.axis((0, out_cols, out_rows, 0))
plt.show()
|
<commit_before>"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 20)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
output_shape = (image.shape[0] - 1.5 * 50, image.shape[1])
out = warp(image, tform, output_shape=output_shape)
plt.imshow(out)
plt.show()
<commit_msg>Add mesh points to plot<commit_after>
|
"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 10)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
out_rows = image.shape[0] - 1.5 * 50
out_cols = cols
out = warp(image, tform, output_shape=(out_rows, out_cols))
plt.imshow(out)
plt.plot(tform.inverse(src)[:, 0], tform.inverse(src)[:, 1], '.b')
plt.axis((0, out_cols, out_rows, 0))
plt.show()
|
"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 20)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
output_shape = (image.shape[0] - 1.5 * 50, image.shape[1])
out = warp(image, tform, output_shape=output_shape)
plt.imshow(out)
plt.show()
Add mesh points to plot"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 10)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
out_rows = image.shape[0] - 1.5 * 50
out_cols = cols
out = warp(image, tform, output_shape=(out_rows, out_cols))
plt.imshow(out)
plt.plot(tform.inverse(src)[:, 0], tform.inverse(src)[:, 1], '.b')
plt.axis((0, out_cols, out_rows, 0))
plt.show()
|
<commit_before>"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 20)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
output_shape = (image.shape[0] - 1.5 * 50, image.shape[1])
out = warp(image, tform, output_shape=output_shape)
plt.imshow(out)
plt.show()
<commit_msg>Add mesh points to plot<commit_after>"""
===============================
Piecewise Affine Transformation
===============================
This example shows how to use the Piecewise Affine Transformation.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import PiecewiseAffineTransform, warp
from skimage import data
image = data.lena()
rows, cols = image.shape[0], image.shape[1]
src_cols = np.linspace(0, cols, 20)
src_rows = np.linspace(0, rows, 10)
src_rows, src_cols = np.meshgrid(src_rows, src_cols)
src = np.dstack([src_cols.flat, src_rows.flat])[0]
# add sinusoidal oscillation to row coordinates
dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
dst_cols = src[:, 0]
dst_rows *= 1.5
dst_rows -= 1.5 * 50
dst = np.vstack([dst_cols, dst_rows]).T
tform = PiecewiseAffineTransform()
tform.estimate(src, dst)
out_rows = image.shape[0] - 1.5 * 50
out_cols = cols
out = warp(image, tform, output_shape=(out_rows, out_cols))
plt.imshow(out)
plt.plot(tform.inverse(src)[:, 0], tform.inverse(src)[:, 1], '.b')
plt.axis((0, out_cols, out_rows, 0))
plt.show()
|
959d20df781edb9f283f5317f50e8000f83e7ab6
|
tests/rules/test_no_such_file.py
|
tests/rules/test_no_such_file.py
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
Add `test_not_match` to `no_such_file` tests
|
Add `test_not_match` to `no_such_file` tests
|
Python
|
mit
|
manashmndl/thefuck,levythu/thefuck,qingying5810/thefuck,mlk/thefuck,vanita5/thefuck,artiya4u/thefuck,nvbn/thefuck,ostree/thefuck,lawrencebenson/thefuck,sekaiamber/thefuck,manashmndl/thefuck,thinkerchan/thefuck,princeofdarkness76/thefuck,subajat1/thefuck,PLNech/thefuck,lawrencebenson/thefuck,roth1002/thefuck,bigplus/thefuck,princeofdarkness76/thefuck,beni55/thefuck,zhangzhishan/thefuck,redreamality/thefuck,ostree/thefuck,NguyenHoaiNam/thefuck,hxddh/thefuck,BertieJim/thefuck,thesoulkiller/thefuck,bigplus/thefuck,mlk/thefuck,barneyElDinosaurio/thefuck,bugaevc/thefuck,Clpsplug/thefuck,mcarton/thefuck,SimenB/thefuck,MJerty/thefuck,levythu/thefuck,BertieJim/thefuck,vanita5/thefuck,Aeron/thefuck,thesoulkiller/thefuck,subajat1/thefuck,PLNech/thefuck,SimenB/thefuck,gogobebe2/thefuck,AntonChankin/thefuck,LawrenceHan/thefuck,mcarton/thefuck,AntonChankin/thefuck,mbbill/thefuck,Clpsplug/thefuck,LawrenceHan/thefuck,scorphus/thefuck,beni55/thefuck,MJerty/thefuck,scorphus/thefuck,thinkerchan/thefuck,redreamality/thefuck,nvbn/thefuck,roth1002/thefuck,hxddh/thefuck,barneyElDinosaurio/thefuck
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
Add `test_not_match` to `no_such_file` tests
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
<commit_before>import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
<commit_msg>Add `test_not_match` to `no_such_file` tests<commit_after>
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
Add `test_not_match` to `no_such_file` testsimport pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
<commit_before>import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
<commit_msg>Add `test_not_match` to `no_such_file` tests<commit_after>import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
6ec61fc80ea8c3626b507d20d6c95d64ae4216c0
|
tests/twisted/connect/timeout.py
|
tests/twisted/connect/timeout.py
|
"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
return
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
return
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
return
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
|
"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
pass
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
pass
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
pass
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
|
Use 'pass', not 'return', for empty Python methods
|
Use 'pass', not 'return', for empty Python methods
|
Python
|
lgpl-2.1
|
community-ssu/telepathy-gabble,community-ssu/telepathy-gabble,community-ssu/telepathy-gabble,community-ssu/telepathy-gabble
|
"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
return
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
return
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
return
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
Use 'pass', not 'return', for empty Python methods
|
"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
pass
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
pass
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
pass
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
|
<commit_before>"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
return
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
return
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
return
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
<commit_msg>Use 'pass', not 'return', for empty Python methods<commit_after>
|
"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
pass
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
pass
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
pass
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
|
"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
return
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
return
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
return
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
Use 'pass', not 'return', for empty Python methods"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
pass
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
pass
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
pass
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
|
<commit_before>"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
return
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
return
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
return
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
<commit_msg>Use 'pass', not 'return', for empty Python methods<commit_after>"""
Test that Gabble times out the connection process after a while if the server
stops responding at various points. Real Gabbles time out after a minute; the
test suite's Gabble times out after a couple of seconds.
"""
from servicetest import assertEquals
from gabbletest import exec_test, XmppAuthenticator
import constants as cs
import ns
class NoStreamHeader(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def streamStarted(self, root=None):
pass
class NoAuthInfoResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def auth(self, auth):
pass
class NoAuthResult(XmppAuthenticator):
def __init__(self):
XmppAuthenticator.__init__(self, 'test', 'pass')
def bindIq(self, iq):
pass
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
e = q.expect('dbus-signal', signal='StatusChanged')
status, reason = e.args
assertEquals(cs.CONN_STATUS_DISCONNECTED, status)
assertEquals(cs.CSR_NETWORK_ERROR, reason)
if __name__ == '__main__':
exec_test(test, authenticator=NoStreamHeader())
exec_test(test, authenticator=NoAuthInfoResult())
exec_test(test, authenticator=NoAuthResult())
|
87446e15eb35ed443f25327e581c350eb19dbe63
|
butter/__init__.py
|
butter/__init__.py
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.2"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.3"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
|
Tag version 0.3 for impeding release
|
Tag version 0.3 for impeding release
|
Python
|
bsd-3-clause
|
arkaitzj/python-butter
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.2"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
Tag version 0.3 for impeding release
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.3"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
|
<commit_before>#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.2"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
<commit_msg>Tag version 0.3 for impeding release<commit_after>
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.3"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.2"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
Tag version 0.3 for impeding release#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.3"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
|
<commit_before>#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.2"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
<commit_msg>Tag version 0.3 for impeding release<commit_after>#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.3"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
|
9486a6a3dddece5d7b636e54d3cbc59436206a65
|
getversion.py
|
getversion.py
|
#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False).getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
|
#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False, ids="optional").getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
if client.ids:
print("Connect to %s\nIDS version %s\n"
% (conf.idsurl, client.ids.apiversion))
|
Connect also to the IDS (if idsurl is set) and report its version.
|
Connect also to the IDS (if idsurl is set) and report its version.
git-svn-id: 5b1347ddac5aba1438c637217dfe0bb137609099@844 8efdbd46-c5fb-49ab-9956-99f62928ec21
|
Python
|
apache-2.0
|
icatproject/python-icat
|
#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False).getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
Connect also to the IDS (if idsurl is set) and report its version.
git-svn-id: 5b1347ddac5aba1438c637217dfe0bb137609099@844 8efdbd46-c5fb-49ab-9956-99f62928ec21
|
#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False, ids="optional").getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
if client.ids:
print("Connect to %s\nIDS version %s\n"
% (conf.idsurl, client.ids.apiversion))
|
<commit_before>#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False).getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
<commit_msg>Connect also to the IDS (if idsurl is set) and report its version.
git-svn-id: 5b1347ddac5aba1438c637217dfe0bb137609099@844 8efdbd46-c5fb-49ab-9956-99f62928ec21<commit_after>
|
#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False, ids="optional").getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
if client.ids:
print("Connect to %s\nIDS version %s\n"
% (conf.idsurl, client.ids.apiversion))
|
#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False).getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
Connect also to the IDS (if idsurl is set) and report its version.
git-svn-id: 5b1347ddac5aba1438c637217dfe0bb137609099@844 8efdbd46-c5fb-49ab-9956-99f62928ec21#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False, ids="optional").getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
if client.ids:
print("Connect to %s\nIDS version %s\n"
% (conf.idsurl, client.ids.apiversion))
|
<commit_before>#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False).getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
<commit_msg>Connect also to the IDS (if idsurl is set) and report its version.
git-svn-id: 5b1347ddac5aba1438c637217dfe0bb137609099@844 8efdbd46-c5fb-49ab-9956-99f62928ec21<commit_after>#! /usr/bin/python
from __future__ import print_function
import sys
import logging
import icat
import icat.config
logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
conf = icat.config.Config(needlogin=False, ids="optional").getconfig()
client = icat.Client(conf.url, **conf.client_kwargs)
print("Python %s\n" % (sys.version))
print("python-icat version %s (%s)\n" % (icat.__version__, icat.__revision__))
print("Connect to %s\nICAT version %s\n" % (conf.url, client.apiversion))
if client.ids:
print("Connect to %s\nIDS version %s\n"
% (conf.idsurl, client.ids.apiversion))
|
4a05dac1b5f0d24aa13cf5d3bca35b1a70ec9e52
|
filter.py
|
filter.py
|
import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", data)
while True:
time.sleep(10)
|
import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", chr(data))
while True:
time.sleep(10)
|
Print char repersentation of data
|
Print char repersentation of data
|
Python
|
mit
|
labseven/SigsysFinalProject
|
import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", data)
while True:
time.sleep(10)
Print char repersentation of data
|
import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", chr(data))
while True:
time.sleep(10)
|
<commit_before>import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", data)
while True:
time.sleep(10)
<commit_msg>Print char repersentation of data<commit_after>
|
import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", chr(data))
while True:
time.sleep(10)
|
import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", data)
while True:
time.sleep(10)
Print char repersentation of dataimport matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", chr(data))
while True:
time.sleep(10)
|
<commit_before>import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", data)
while True:
time.sleep(10)
<commit_msg>Print char repersentation of data<commit_after>import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import time
from signal_functions import *
match_filter = make_match_filter()
signal_in = import_wav("rec.wav")
# plot_waveform(match_filter, downsample=1, title="Match Filter", ax_labels=["Samples", "Magnitude"])
# plot_signal(signal_in, downsample=1)
envelope, convolution = get_envelope(signal_in[:600000])
# plot_waveform(convolution[:350000], downsample=10, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_waveform(envelope[:35000], downsample=1, title="5kHz Signal after Convolution", ax_labels=["Samples", "Magnitude"])
# plot_signal(convolution)
interrupt_t, thresholds = find_intterupts(envelope)
data, packet = extract_data(interrupt_t)
plot_envelope_interrupts(envelope, interrupt_t, thresholds)
print("Packet:", packet, "Bits:", len(packet) + 1)
print("Data:", chr(data))
while True:
time.sleep(10)
|
2807e2c39e54046cb750c290cb7b12b289e1cd9a
|
test/test_indexing.py
|
test/test_indexing.py
|
import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
return Client()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
|
import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
client = Client()
yield client
client.close()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
|
Clean up dask client in indexing test
|
Clean up dask client in indexing test
|
Python
|
apache-2.0
|
OceansAus/cosima-cookbook
|
import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
return Client()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
Clean up dask client in indexing test
|
import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
client = Client()
yield client
client.close()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
|
<commit_before>import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
return Client()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
<commit_msg>Clean up dask client in indexing test<commit_after>
|
import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
client = Client()
yield client
client.close()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
|
import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
return Client()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
Clean up dask client in indexing testimport pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
client = Client()
yield client
client.close()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
|
<commit_before>import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
return Client()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
<commit_msg>Clean up dask client in indexing test<commit_after>import pytest
import os
import shutil
import xarray as xr
from cosima_cookbook import database
from dask.distributed import Client
from sqlalchemy import select, func
@pytest.fixture(scope='module')
def client():
client = Client()
yield client
client.close()
def test_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 0)
q = select([func.count()]).select_from(schema['ncvars'])
r = conn.execute(q)
assert(r.first()[0] == 0)
def test_single_broken(client, tmp_path):
db = tmp_path / 'test.db'
database.build_index('test/data/indexing/single_broken_file', client, str(db))
# make sure the database was created
assert(db.exists())
conn, schema = database.create_database(str(db))
# query ncfiles table
q = select([func.count()]).select_from(schema['ncfiles'])
r = conn.execute(q)
assert(r.first()[0] == 1)
|
7d3de3aa2441739aa951aa100c057cfa878887d5
|
nukedb.py
|
nukedb.py
|
import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
conn.commit()
|
import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
try:
c.execute('''DROP TABLE recentchanges;''')
except:
pass
conn.commit()
|
Drop recent changes on nuke
|
Drop recent changes on nuke
|
Python
|
bsd-3-clause
|
TimSC/auxgis
|
import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
conn.commit()
Drop recent changes on nuke
|
import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
try:
c.execute('''DROP TABLE recentchanges;''')
except:
pass
conn.commit()
|
<commit_before>import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
conn.commit()
<commit_msg>Drop recent changes on nuke<commit_after>
|
import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
try:
c.execute('''DROP TABLE recentchanges;''')
except:
pass
conn.commit()
|
import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
conn.commit()
Drop recent changes on nukeimport sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
try:
c.execute('''DROP TABLE recentchanges;''')
except:
pass
conn.commit()
|
<commit_before>import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
conn.commit()
<commit_msg>Drop recent changes on nuke<commit_after>import sqlite3
if __name__=="__main__":
conn = sqlite3.connect('auxgis.db')
c = conn.cursor()
try:
c.execute('''DROP TABLE pos;''')
except:
pass
try:
c.execute('''DROP TABLE data;''')
except:
pass
try:
c.execute('''DROP TABLE recentchanges;''')
except:
pass
conn.commit()
|
d0fb38da0200c1b780e296d6c5767438e2f82dc8
|
array/sudoku-check.py
|
array/sudoku-check.py
|
# Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
|
# Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
def create_sub_grid(grid):
ref_check = {}
for square in grid:
if square != '.' and square in ref_check:
return False
else:
ref_check[square] = 1
return True
|
Add check sub grid method
|
Add check sub grid method
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
# Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
Add check sub grid method
|
# Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
def create_sub_grid(grid):
ref_check = {}
for square in grid:
if square != '.' and square in ref_check:
return False
else:
ref_check[square] = 1
return True
|
<commit_before># Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
<commit_msg>Add check sub grid method<commit_after>
|
# Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
def create_sub_grid(grid):
ref_check = {}
for square in grid:
if square != '.' and square in ref_check:
return False
else:
ref_check[square] = 1
return True
|
# Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
Add check sub grid method# Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
def create_sub_grid(grid):
ref_check = {}
for square in grid:
if square != '.' and square in ref_check:
return False
else:
ref_check[square] = 1
return True
|
<commit_before># Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
<commit_msg>Add check sub grid method<commit_after># Implement an algorithm that will check whether a given grid of numbers represents a valid Sudoku puzzle
def check_rows(grid):
i = 0
while i < len(grid):
j = 0
ref_check = {}
while j < len(grid[i]):
if grid[i][j] != '.' and grid[i][j] in ref_check:
return False
else:
ref_check[grid[i][j]] = 1
j += 1
i += 1
return True
def check_columns(grid):
column = 0
length = len(grid)
while column < length:
row = 0
ref_check = {}
while row < length:
if grid[row][column] != '.' and grid[row][column] in ref_check:
return False
else:
ref_check[grid[row][column]] = 1
row += 1
column += 1
return True
def create_sub_grid(grid):
ref_check = {}
for square in grid:
if square != '.' and square in ref_check:
return False
else:
ref_check[square] = 1
return True
|
55545a23dc209afc07ebe25c296505af50207340
|
yelp_kafka_tool/util/__init__.py
|
yelp_kafka_tool/util/__init__.py
|
from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def print_json(data):
"""Converts `data` into json and prints it to stdout.
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
print(json.dumps(data, indent=4, separators=(',', ': ')))
else:
print(json.dumps(data))
|
from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def format_to_json(data):
"""Converts `data` into json
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
return json.dumps(data, indent=4, separators=(',', ': '))
else:
return json.dumps(data)
def print_json(data):
"""Converts `data` into json and prints it to stdout."""
print(format_to_json(data))
|
Add brokers information to the output of kafka-info
|
Add brokers information to the output of kafka-info
|
Python
|
apache-2.0
|
anthonysandrin/kafka-utils,Yelp/kafka-utils,anthonysandrin/kafka-utils,Yelp/kafka-utils
|
from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def print_json(data):
"""Converts `data` into json and prints it to stdout.
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
print(json.dumps(data, indent=4, separators=(',', ': ')))
else:
print(json.dumps(data))
Add brokers information to the output of kafka-info
|
from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def format_to_json(data):
"""Converts `data` into json
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
return json.dumps(data, indent=4, separators=(',', ': '))
else:
return json.dumps(data)
def print_json(data):
"""Converts `data` into json and prints it to stdout."""
print(format_to_json(data))
|
<commit_before>from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def print_json(data):
"""Converts `data` into json and prints it to stdout.
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
print(json.dumps(data, indent=4, separators=(',', ': ')))
else:
print(json.dumps(data))
<commit_msg>Add brokers information to the output of kafka-info<commit_after>
|
from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def format_to_json(data):
"""Converts `data` into json
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
return json.dumps(data, indent=4, separators=(',', ': '))
else:
return json.dumps(data)
def print_json(data):
"""Converts `data` into json and prints it to stdout."""
print(format_to_json(data))
|
from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def print_json(data):
"""Converts `data` into json and prints it to stdout.
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
print(json.dumps(data, indent=4, separators=(',', ': ')))
else:
print(json.dumps(data))
Add brokers information to the output of kafka-infofrom __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def format_to_json(data):
"""Converts `data` into json
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
return json.dumps(data, indent=4, separators=(',', ': '))
else:
return json.dumps(data)
def print_json(data):
"""Converts `data` into json and prints it to stdout."""
print(format_to_json(data))
|
<commit_before>from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def print_json(data):
"""Converts `data` into json and prints it to stdout.
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
print(json.dumps(data, indent=4, separators=(',', ': ')))
else:
print(json.dumps(data))
<commit_msg>Add brokers information to the output of kafka-info<commit_after>from __future__ import print_function
import json
import sys
from itertools import groupby
def groupsortby(data, key):
"""Sort and group by the same key."""
return groupby(sorted(data, key=key), key)
def dict_merge(set1, set2):
"""Joins two dictionaries."""
return dict(set1.items() + set2.items())
def to_h(num, suffix='B'):
"""Converts a byte value in human readable form."""
if num is None: # Show None when data is missing
return "None"
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def format_to_json(data):
"""Converts `data` into json
If stdout is a tty it performs a pretty print.
"""
if sys.stdout.isatty():
return json.dumps(data, indent=4, separators=(',', ': '))
else:
return json.dumps(data)
def print_json(data):
"""Converts `data` into json and prints it to stdout."""
print(format_to_json(data))
|
f516749bc41dbebeb5b0ae07078af78f510a592e
|
lib/markdown_deux/__init__.py
|
lib/markdown_deux/__init__.py
|
#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '1.0.1'
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
print "XXX markdown_deux.markdown(style=%r) -> %r" % (style, get_style(style))
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.
|
Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.
|
Python
|
mit
|
douzepouze/django-markdown-tag,trentm/django-markdown-deux,gogobook/django-markdown-deux,gogobook/django-markdown-deux
|
#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '1.0.1'
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
print "XXX markdown_deux.markdown(style=%r) -> %r" % (style, get_style(style))
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.
|
#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '1.0.1'
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
print "XXX markdown_deux.markdown(style=%r) -> %r" % (style, get_style(style))
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
<commit_msg>Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '1.0.1'
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
print "XXX markdown_deux.markdown(style=%r) -> %r" % (style, get_style(style))
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '1.0.1'
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
print "XXX markdown_deux.markdown(style=%r) -> %r" % (style, get_style(style))
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
<commit_msg>Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.<commit_after>#!/usr/bin/env python
# Copyright (c) 2008-2010 ActiveState Corp.
# License: MIT (http://www.opensource.org/licenses/mit-license.php)
r"""A small Django app that provides template tags for Markdown using the
python-markdown2 library.
See <http://github.com/trentm/django-markdown-deux> for more info.
"""
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
c84728b57d1c8923cdadec10f132953de4c1dd21
|
tests/integration/conftest.py
|
tests/integration/conftest.py
|
import pytest
@pytest.fixture
def coinbase():
return '0xdc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd'
@pytest.fixture
def private_key():
return '0x58d23b55bc9cdce1f18c2500f40ff4ab7245df9a89505e9b1fa4851f623d241d'
KEYFILE = '{"address":"dc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd","crypto":{"cipher":"aes-128-ctr","ciphertext":"52e06bc9397ea9fa2f0dae8de2b3e8116e92a2ecca9ad5ff0061d1c449704e98","cipherparams":{"iv":"aa5d0a5370ef65395c1a6607af857124"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"9fdf0764eb3645ffc184e166537f6fe70516bf0e34dc7311dea21f100f0c9263"},"mac":"4e0b51f42b865c15c485f4faefdd1f01a38637e5247f8c75ffe6a8c0eba856f6"},"id":"5a6124e0-10f1-4c1c-ae3e-d903eacb740a","version":3}' # noqa: E501
|
import pytest
from web3.utils.module_testing.math_contract import (
MATH_BYTECODE,
MATH_ABI,
)
from web3.utils.module_testing.emitter_contract import (
EMITTER_BYTECODE,
EMITTER_ABI,
)
@pytest.fixture(scope="session")
def math_contract_factory(web3):
contract_factory = web3.eth.contract(abi=MATH_ABI, bytecode=MATH_BYTECODE)
return contract_factory
@pytest.fixture(scope="session")
def emitter_contract_factory(web3):
contract_factory = web3.eth.contract(abi=EMITTER_ABI, bytecode=EMITTER_BYTECODE)
return contract_factory
|
Add common factory fixtures to be shared across integration tests
|
Add common factory fixtures to be shared across integration tests
|
Python
|
mit
|
pipermerriam/web3.py
|
import pytest
@pytest.fixture
def coinbase():
return '0xdc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd'
@pytest.fixture
def private_key():
return '0x58d23b55bc9cdce1f18c2500f40ff4ab7245df9a89505e9b1fa4851f623d241d'
KEYFILE = '{"address":"dc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd","crypto":{"cipher":"aes-128-ctr","ciphertext":"52e06bc9397ea9fa2f0dae8de2b3e8116e92a2ecca9ad5ff0061d1c449704e98","cipherparams":{"iv":"aa5d0a5370ef65395c1a6607af857124"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"9fdf0764eb3645ffc184e166537f6fe70516bf0e34dc7311dea21f100f0c9263"},"mac":"4e0b51f42b865c15c485f4faefdd1f01a38637e5247f8c75ffe6a8c0eba856f6"},"id":"5a6124e0-10f1-4c1c-ae3e-d903eacb740a","version":3}' # noqa: E501
Add common factory fixtures to be shared across integration tests
|
import pytest
from web3.utils.module_testing.math_contract import (
MATH_BYTECODE,
MATH_ABI,
)
from web3.utils.module_testing.emitter_contract import (
EMITTER_BYTECODE,
EMITTER_ABI,
)
@pytest.fixture(scope="session")
def math_contract_factory(web3):
contract_factory = web3.eth.contract(abi=MATH_ABI, bytecode=MATH_BYTECODE)
return contract_factory
@pytest.fixture(scope="session")
def emitter_contract_factory(web3):
contract_factory = web3.eth.contract(abi=EMITTER_ABI, bytecode=EMITTER_BYTECODE)
return contract_factory
|
<commit_before>import pytest
@pytest.fixture
def coinbase():
return '0xdc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd'
@pytest.fixture
def private_key():
return '0x58d23b55bc9cdce1f18c2500f40ff4ab7245df9a89505e9b1fa4851f623d241d'
KEYFILE = '{"address":"dc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd","crypto":{"cipher":"aes-128-ctr","ciphertext":"52e06bc9397ea9fa2f0dae8de2b3e8116e92a2ecca9ad5ff0061d1c449704e98","cipherparams":{"iv":"aa5d0a5370ef65395c1a6607af857124"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"9fdf0764eb3645ffc184e166537f6fe70516bf0e34dc7311dea21f100f0c9263"},"mac":"4e0b51f42b865c15c485f4faefdd1f01a38637e5247f8c75ffe6a8c0eba856f6"},"id":"5a6124e0-10f1-4c1c-ae3e-d903eacb740a","version":3}' # noqa: E501
<commit_msg>Add common factory fixtures to be shared across integration tests<commit_after>
|
import pytest
from web3.utils.module_testing.math_contract import (
MATH_BYTECODE,
MATH_ABI,
)
from web3.utils.module_testing.emitter_contract import (
EMITTER_BYTECODE,
EMITTER_ABI,
)
@pytest.fixture(scope="session")
def math_contract_factory(web3):
contract_factory = web3.eth.contract(abi=MATH_ABI, bytecode=MATH_BYTECODE)
return contract_factory
@pytest.fixture(scope="session")
def emitter_contract_factory(web3):
contract_factory = web3.eth.contract(abi=EMITTER_ABI, bytecode=EMITTER_BYTECODE)
return contract_factory
|
import pytest
@pytest.fixture
def coinbase():
return '0xdc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd'
@pytest.fixture
def private_key():
return '0x58d23b55bc9cdce1f18c2500f40ff4ab7245df9a89505e9b1fa4851f623d241d'
KEYFILE = '{"address":"dc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd","crypto":{"cipher":"aes-128-ctr","ciphertext":"52e06bc9397ea9fa2f0dae8de2b3e8116e92a2ecca9ad5ff0061d1c449704e98","cipherparams":{"iv":"aa5d0a5370ef65395c1a6607af857124"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"9fdf0764eb3645ffc184e166537f6fe70516bf0e34dc7311dea21f100f0c9263"},"mac":"4e0b51f42b865c15c485f4faefdd1f01a38637e5247f8c75ffe6a8c0eba856f6"},"id":"5a6124e0-10f1-4c1c-ae3e-d903eacb740a","version":3}' # noqa: E501
Add common factory fixtures to be shared across integration testsimport pytest
from web3.utils.module_testing.math_contract import (
MATH_BYTECODE,
MATH_ABI,
)
from web3.utils.module_testing.emitter_contract import (
EMITTER_BYTECODE,
EMITTER_ABI,
)
@pytest.fixture(scope="session")
def math_contract_factory(web3):
contract_factory = web3.eth.contract(abi=MATH_ABI, bytecode=MATH_BYTECODE)
return contract_factory
@pytest.fixture(scope="session")
def emitter_contract_factory(web3):
contract_factory = web3.eth.contract(abi=EMITTER_ABI, bytecode=EMITTER_BYTECODE)
return contract_factory
|
<commit_before>import pytest
@pytest.fixture
def coinbase():
return '0xdc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd'
@pytest.fixture
def private_key():
return '0x58d23b55bc9cdce1f18c2500f40ff4ab7245df9a89505e9b1fa4851f623d241d'
KEYFILE = '{"address":"dc544d1aa88ff8bbd2f2aec754b1f1e99e1812fd","crypto":{"cipher":"aes-128-ctr","ciphertext":"52e06bc9397ea9fa2f0dae8de2b3e8116e92a2ecca9ad5ff0061d1c449704e98","cipherparams":{"iv":"aa5d0a5370ef65395c1a6607af857124"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"9fdf0764eb3645ffc184e166537f6fe70516bf0e34dc7311dea21f100f0c9263"},"mac":"4e0b51f42b865c15c485f4faefdd1f01a38637e5247f8c75ffe6a8c0eba856f6"},"id":"5a6124e0-10f1-4c1c-ae3e-d903eacb740a","version":3}' # noqa: E501
<commit_msg>Add common factory fixtures to be shared across integration tests<commit_after>import pytest
from web3.utils.module_testing.math_contract import (
MATH_BYTECODE,
MATH_ABI,
)
from web3.utils.module_testing.emitter_contract import (
EMITTER_BYTECODE,
EMITTER_ABI,
)
@pytest.fixture(scope="session")
def math_contract_factory(web3):
contract_factory = web3.eth.contract(abi=MATH_ABI, bytecode=MATH_BYTECODE)
return contract_factory
@pytest.fixture(scope="session")
def emitter_contract_factory(web3):
contract_factory = web3.eth.contract(abi=EMITTER_ABI, bytecode=EMITTER_BYTECODE)
return contract_factory
|
24eae355c01365ce6eb219f0ca99a53d4df67be4
|
mccurse/__init__.py
|
mccurse/__init__.py
|
"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
|
"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data_'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
|
Fix typo in PKGDATA path
|
Fix typo in PKGDATA path
|
Python
|
agpl-3.0
|
khardix/mccurse
|
"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
Fix typo in PKGDATA path
|
"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data_'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
|
<commit_before>"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
<commit_msg>Fix typo in PKGDATA path<commit_after>
|
"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data_'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
|
"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
Fix typo in PKGDATA path"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data_'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
|
<commit_before>"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
<commit_msg>Fix typo in PKGDATA path<commit_after>"""Minecraft Curse CLI Client"""
import gettext
from pathlib import Path
#: Consistent names definitions
RESOURCE_NAME = __package__
#: Root of the package
PKGDIR = Path(__file__).resolve().parent
#: Package data directory
PKGDATA = PKGDIR / '_data_'
#: Root of the locale files
localedir = PKGDATA / 'locales'
#: Translation machinery for the app
translation = gettext.translation(
domain=__package__,
# Allow the locale files to be stored in system folder
localedir=str(localedir) if localedir.is_dir() else None,
fallback=True,
)
_ = translation.gettext
|
564ae1eb637ec509f37ade93d4079117cc73fd58
|
lab_assistant/storage/__init__.py
|
lab_assistant/storage/__init__.py
|
from copy import deepcopy
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
return get_storage._cache[path]
Storage = utils.import_path(path)
get_storage._cache[path] = Storage(name, **opts)
return get_storage._cache[path]
get_storage._cache = {}
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
|
from copy import deepcopy
from collections import defaultdict
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
if name in get_storage._cache[path]:
return get_storage._cache[path][name]
Storage = utils.import_path(path)
get_storage._cache[path].update({
name: Storage(name, **opts)
})
return get_storage._cache[path][name]
get_storage._cache = defaultdict(dict)
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
|
Fix get_storage cache to hold separate entries for each experiment key
|
Fix get_storage cache to hold separate entries for each experiment key
|
Python
|
mit
|
joealcorn/lab_assistant
|
from copy import deepcopy
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
return get_storage._cache[path]
Storage = utils.import_path(path)
get_storage._cache[path] = Storage(name, **opts)
return get_storage._cache[path]
get_storage._cache = {}
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
Fix get_storage cache to hold separate entries for each experiment key
|
from copy import deepcopy
from collections import defaultdict
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
if name in get_storage._cache[path]:
return get_storage._cache[path][name]
Storage = utils.import_path(path)
get_storage._cache[path].update({
name: Storage(name, **opts)
})
return get_storage._cache[path][name]
get_storage._cache = defaultdict(dict)
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
|
<commit_before>from copy import deepcopy
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
return get_storage._cache[path]
Storage = utils.import_path(path)
get_storage._cache[path] = Storage(name, **opts)
return get_storage._cache[path]
get_storage._cache = {}
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
<commit_msg>Fix get_storage cache to hold separate entries for each experiment key<commit_after>
|
from copy import deepcopy
from collections import defaultdict
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
if name in get_storage._cache[path]:
return get_storage._cache[path][name]
Storage = utils.import_path(path)
get_storage._cache[path].update({
name: Storage(name, **opts)
})
return get_storage._cache[path][name]
get_storage._cache = defaultdict(dict)
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
|
from copy import deepcopy
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
return get_storage._cache[path]
Storage = utils.import_path(path)
get_storage._cache[path] = Storage(name, **opts)
return get_storage._cache[path]
get_storage._cache = {}
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
Fix get_storage cache to hold separate entries for each experiment keyfrom copy import deepcopy
from collections import defaultdict
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
if name in get_storage._cache[path]:
return get_storage._cache[path][name]
Storage = utils.import_path(path)
get_storage._cache[path].update({
name: Storage(name, **opts)
})
return get_storage._cache[path][name]
get_storage._cache = defaultdict(dict)
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
|
<commit_before>from copy import deepcopy
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
return get_storage._cache[path]
Storage = utils.import_path(path)
get_storage._cache[path] = Storage(name, **opts)
return get_storage._cache[path]
get_storage._cache = {}
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
<commit_msg>Fix get_storage cache to hold separate entries for each experiment key<commit_after>from copy import deepcopy
from collections import defaultdict
from simpleflake import simpleflake
from lab_assistant import conf, utils
__all__ = [
'get_storage',
'store',
'retrieve',
'retrieve_all',
'clear',
]
def get_storage(path=None, name='Experiment', **opts):
if not path:
path = conf.storage['path']
_opts = deepcopy(conf.storage.get('options', {}))
_opts.update(opts)
opts = _opts
if path in get_storage._cache:
if name in get_storage._cache[path]:
return get_storage._cache[path][name]
Storage = utils.import_path(path)
get_storage._cache[path].update({
name: Storage(name, **opts)
})
return get_storage._cache[path][name]
get_storage._cache = defaultdict(dict)
def store(result, storage=None):
storage = storage or get_storage(name=result.experiment.name)
key = simpleflake()
storage.set(key, result)
return key
def retrieve(key, storage=None):
storage = storage or get_storage()
return storage.get(key)
def retrieve_all(storage=None):
return (storage or get_storage()).list()
def remove(key, storage=None):
(storage or get_storage()).remove(key)
def clear(storage=None):
return (storage or get_storage()).clear()
|
cd4c268b0752f85f8dadac03e28f152767ce9f54
|
tinycontent/templatetags/tinycontent_tags.py
|
tinycontent/templatetags/tinycontent_tags.py
|
from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def get_content_name(self, context):
if self.content_name[0] != '"' and self.content_name[0] != "'":
try:
return context[self.content_name]
except KeyError:
raise TinyContent.DoesNotExist
if self.content_name[0] == '"' and self.content_name[-1] == '"':
return self.content_name[1:-1]
if self.content_name[0] == "'" and self.content_name[-1] == "'":
return self.content_name[1:-1]
raise TemplateSyntaxError("Unclosed argument to tinycontent.")
def render(self, context):
try:
name = self.get_content_name(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = args[1]
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
|
from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def render(self, context):
try:
name = self.content_name.resolve(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = parser.compile_filter(args[1])
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
|
Use parser.compile_filter instead of my half-baked attempt
|
Use parser.compile_filter instead of my half-baked attempt
|
Python
|
bsd-3-clause
|
dominicrodger/django-tinycontent,ad-m/django-tinycontent,watchdogpolska/django-tinycontent,ad-m/django-tinycontent,watchdogpolska/django-tinycontent,dominicrodger/django-tinycontent
|
from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def get_content_name(self, context):
if self.content_name[0] != '"' and self.content_name[0] != "'":
try:
return context[self.content_name]
except KeyError:
raise TinyContent.DoesNotExist
if self.content_name[0] == '"' and self.content_name[-1] == '"':
return self.content_name[1:-1]
if self.content_name[0] == "'" and self.content_name[-1] == "'":
return self.content_name[1:-1]
raise TemplateSyntaxError("Unclosed argument to tinycontent.")
def render(self, context):
try:
name = self.get_content_name(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = args[1]
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
Use parser.compile_filter instead of my half-baked attempt
|
from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def render(self, context):
try:
name = self.content_name.resolve(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = parser.compile_filter(args[1])
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
|
<commit_before>from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def get_content_name(self, context):
if self.content_name[0] != '"' and self.content_name[0] != "'":
try:
return context[self.content_name]
except KeyError:
raise TinyContent.DoesNotExist
if self.content_name[0] == '"' and self.content_name[-1] == '"':
return self.content_name[1:-1]
if self.content_name[0] == "'" and self.content_name[-1] == "'":
return self.content_name[1:-1]
raise TemplateSyntaxError("Unclosed argument to tinycontent.")
def render(self, context):
try:
name = self.get_content_name(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = args[1]
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
<commit_msg>Use parser.compile_filter instead of my half-baked attempt<commit_after>
|
from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def render(self, context):
try:
name = self.content_name.resolve(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = parser.compile_filter(args[1])
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
|
from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def get_content_name(self, context):
if self.content_name[0] != '"' and self.content_name[0] != "'":
try:
return context[self.content_name]
except KeyError:
raise TinyContent.DoesNotExist
if self.content_name[0] == '"' and self.content_name[-1] == '"':
return self.content_name[1:-1]
if self.content_name[0] == "'" and self.content_name[-1] == "'":
return self.content_name[1:-1]
raise TemplateSyntaxError("Unclosed argument to tinycontent.")
def render(self, context):
try:
name = self.get_content_name(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = args[1]
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
Use parser.compile_filter instead of my half-baked attemptfrom django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def render(self, context):
try:
name = self.content_name.resolve(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = parser.compile_filter(args[1])
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
|
<commit_before>from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def get_content_name(self, context):
if self.content_name[0] != '"' and self.content_name[0] != "'":
try:
return context[self.content_name]
except KeyError:
raise TinyContent.DoesNotExist
if self.content_name[0] == '"' and self.content_name[-1] == '"':
return self.content_name[1:-1]
if self.content_name[0] == "'" and self.content_name[-1] == "'":
return self.content_name[1:-1]
raise TemplateSyntaxError("Unclosed argument to tinycontent.")
def render(self, context):
try:
name = self.get_content_name(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = args[1]
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
<commit_msg>Use parser.compile_filter instead of my half-baked attempt<commit_after>from django import template
from django.template.base import TemplateSyntaxError
from tinycontent.models import TinyContent
register = template.Library()
class TinyContentNode(template.Node):
def __init__(self, content_name, nodelist):
self.content_name = content_name
self.nodelist = nodelist
def render(self, context):
try:
name = self.content_name.resolve(context)
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return self.nodelist.render(context)
@register.tag
def tinycontent(parser, token):
args = token.split_contents()
if len(args) != 2:
raise TemplateSyntaxError("'tinycontent' tag takes exactly one"
" argument.")
content_name = parser.compile_filter(args[1])
nodelist = parser.parse(('endtinycontent',))
parser.delete_first_token()
return TinyContentNode(content_name, nodelist)
@register.simple_tag
def tinycontent_simple(name):
try:
obj = TinyContent.objects.get(name=name)
return obj.content
except TinyContent.DoesNotExist:
return ''
|
1736883e6635a13aa896209e3649c9b30b87b54d
|
bin/create_contour.py
|
bin/create_contour.py
|
#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_DIR = './website/data'
def test():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(month)
filepath_out = os.path.join(DATA_DIR, 'contour_cloud_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
|
#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_OUT_DIR = './website/data'
TYPES = {
'precipitation': './data/precipitation/cpre6190.dat',
'cloud': './data/cloud/ccld6190.dat',
}
def main():
for data_type, filepath in TYPES.items():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(filepath, month)
filepath_out = os.path.join(DATA_OUT_DIR, 'contour_' + data_type +'_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
main()
|
Create contour data for multiple climate data types
|
Create contour data for multiple climate data types
|
Python
|
mit
|
bartromgens/climatemaps,bartromgens/climatemaps,bartromgens/climatemaps
|
#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_DIR = './website/data'
def test():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(month)
filepath_out = os.path.join(DATA_DIR, 'contour_cloud_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
Create contour data for multiple climate data types
|
#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_OUT_DIR = './website/data'
TYPES = {
'precipitation': './data/precipitation/cpre6190.dat',
'cloud': './data/cloud/ccld6190.dat',
}
def main():
for data_type, filepath in TYPES.items():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(filepath, month)
filepath_out = os.path.join(DATA_OUT_DIR, 'contour_' + data_type +'_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_DIR = './website/data'
def test():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(month)
filepath_out = os.path.join(DATA_DIR, 'contour_cloud_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
<commit_msg>Create contour data for multiple climate data types<commit_after>
|
#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_OUT_DIR = './website/data'
TYPES = {
'precipitation': './data/precipitation/cpre6190.dat',
'cloud': './data/cloud/ccld6190.dat',
}
def main():
for data_type, filepath in TYPES.items():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(filepath, month)
filepath_out = os.path.join(DATA_OUT_DIR, 'contour_' + data_type +'_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_DIR = './website/data'
def test():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(month)
filepath_out = os.path.join(DATA_DIR, 'contour_cloud_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
Create contour data for multiple climate data types#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_OUT_DIR = './website/data'
TYPES = {
'precipitation': './data/precipitation/cpre6190.dat',
'cloud': './data/cloud/ccld6190.dat',
}
def main():
for data_type, filepath in TYPES.items():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(filepath, month)
filepath_out = os.path.join(DATA_OUT_DIR, 'contour_' + data_type +'_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_DIR = './website/data'
def test():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(month)
filepath_out = os.path.join(DATA_DIR, 'contour_cloud_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
<commit_msg>Create contour data for multiple climate data types<commit_after>#!/usr/bin/env python3
import sys
import os
sys.path.append('./climatemaps')
import climatemaps
DATA_OUT_DIR = './website/data'
TYPES = {
'precipitation': './data/precipitation/cpre6190.dat',
'cloud': './data/cloud/ccld6190.dat',
}
def main():
for data_type, filepath in TYPES.items():
for month in range(1, 13):
latrange, lonrange, Z = climatemaps.data.import_climate_data(filepath, month)
filepath_out = os.path.join(DATA_OUT_DIR, 'contour_' + data_type +'_' + str(month) + '.json')
test_config = climatemaps.contour.ContourPlotConfig()
contourmap = climatemaps.contour.Contour(test_config, lonrange, latrange, Z)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
main()
|
f7faebbd91b4dc0fcd11e10d215d752badc899d6
|
aspc/senate/views.py
|
aspc/senate/views.py
|
from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = qs.filter(end__isnull=True)
qs |= qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
|
from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
all_qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = all_qs.filter(end__isnull=True)
qs |= all_qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
|
Change queryset filtering for positions view
|
Change queryset filtering for positions view
|
Python
|
mit
|
theworldbright/mainsite,theworldbright/mainsite,theworldbright/mainsite,aspc/mainsite,aspc/mainsite,theworldbright/mainsite,aspc/mainsite,aspc/mainsite
|
from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = qs.filter(end__isnull=True)
qs |= qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
Change queryset filtering for positions view
|
from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
all_qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = all_qs.filter(end__isnull=True)
qs |= all_qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
|
<commit_before>from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = qs.filter(end__isnull=True)
qs |= qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
<commit_msg>Change queryset filtering for positions view<commit_after>
|
from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
all_qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = all_qs.filter(end__isnull=True)
qs |= all_qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
|
from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = qs.filter(end__isnull=True)
qs |= qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
Change queryset filtering for positions viewfrom django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
all_qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = all_qs.filter(end__isnull=True)
qs |= all_qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
|
<commit_before>from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = qs.filter(end__isnull=True)
qs |= qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
<commit_msg>Change queryset filtering for positions view<commit_after>from django.views.generic import ListView
from aspc.senate.models import Document, Appointment
import datetime
class DocumentList(ListView):
model = Document
context_object_name = 'documents'
paginate_by = 20
class AppointmentList(ListView):
model = Appointment
context_object_name = 'appointments'
def get_queryset(self, *args, **kwargs):
all_qs = super(AppointmentList, self).get_queryset(*args, **kwargs)
qs = all_qs.filter(end__isnull=True)
qs |= all_qs.filter(end__gte=datetime.datetime.now())
qs = qs.order_by('position__sort_order')
return qs
|
848d783bd988e0cdf31b690f17837ac02e77b43a
|
pypodio2/client.py
|
pypodio2/client.py
|
# -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
|
# -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
def __dir__(self):
"""
Should return list of attribute names.
Since __getattr__ looks in areas, we simply list the content of the areas module
"""
return dir(areas)
|
Add __dir__ method to Client in order to allow autocompletion in interactive terminals, etc.
|
Add __dir__ method to Client in order to allow autocompletion in interactive terminals, etc.
|
Python
|
mit
|
podio/podio-py
|
# -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
Add __dir__ method to Client in order to allow autocompletion in interactive terminals, etc.
|
# -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
def __dir__(self):
"""
Should return list of attribute names.
Since __getattr__ looks in areas, we simply list the content of the areas module
"""
return dir(areas)
|
<commit_before># -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
<commit_msg>Add __dir__ method to Client in order to allow autocompletion in interactive terminals, etc.<commit_after>
|
# -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
def __dir__(self):
"""
Should return list of attribute names.
Since __getattr__ looks in areas, we simply list the content of the areas module
"""
return dir(areas)
|
# -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
Add __dir__ method to Client in order to allow autocompletion in interactive terminals, etc.# -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
def __dir__(self):
"""
Should return list of attribute names.
Since __getattr__ looks in areas, we simply list the content of the areas module
"""
return dir(areas)
|
<commit_before># -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
<commit_msg>Add __dir__ method to Client in order to allow autocompletion in interactive terminals, etc.<commit_after># -*- coding: utf-8 -*-
from . import areas
class FailedRequest(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return repr(self.error)
class Client(object):
"""
The Podio API client. Callers should use the factory method OAuthClient to create instances.
"""
def __init__(self, transport):
self.transport = transport
def __getattr__(self, name):
new_trans = self.transport
area = getattr(areas, name)
return area(new_trans)
def __dir__(self):
"""
Should return list of attribute names.
Since __getattr__ looks in areas, we simply list the content of the areas module
"""
return dir(areas)
|
8e7feb7bc09feeca8d3fa0ea9ce6b76edec61ff1
|
test/contrib/test_pyopenssl.py
|
test/contrib/test_pyopenssl.py
|
from urllib3.packages import six
if six.PY3:
from nose.plugins.skip import SkipTest
raise SkipTest('Testing of PyOpenSSL disabled')
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
|
from nose.plugins.skip import SkipTest
from urllib3.packages import six
if six.PY3:
raise SkipTest('Testing of PyOpenSSL disabled on PY3')
try:
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
except ImportError as e:
raise SkipTest('Could not import PyOpenSSL: %r' % e)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
|
Disable PyOpenSSL tests by default.
|
Disable PyOpenSSL tests by default.
|
Python
|
mit
|
Lukasa/urllib3,matejcik/urllib3,asmeurer/urllib3,sornars/urllib3,silveringsea/urllib3,denim2x/urllib3,sornars/urllib3,Geoion/urllib3,haikuginger/urllib3,matejcik/urllib3,Geoion/urllib3,boyxuper/urllib3,urllib3/urllib3,haikuginger/urllib3,sileht/urllib3,sigmavirus24/urllib3,gardner/urllib3,silveringsea/urllib3,luca3m/urllib3,msabramo/urllib3,boyxuper/urllib3,sigmavirus24/urllib3,Disassem/urllib3,Disassem/urllib3,mikelambert/urllib3,denim2x/urllib3,tutumcloud/urllib3,msabramo/urllib3,sileht/urllib3,urllib3/urllib3,asmeurer/urllib3,mikelambert/urllib3,gardner/urllib3,Lukasa/urllib3,luca3m/urllib3,tutumcloud/urllib3
|
from urllib3.packages import six
if six.PY3:
from nose.plugins.skip import SkipTest
raise SkipTest('Testing of PyOpenSSL disabled')
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
Disable PyOpenSSL tests by default.
|
from nose.plugins.skip import SkipTest
from urllib3.packages import six
if six.PY3:
raise SkipTest('Testing of PyOpenSSL disabled on PY3')
try:
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
except ImportError as e:
raise SkipTest('Could not import PyOpenSSL: %r' % e)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
|
<commit_before>from urllib3.packages import six
if six.PY3:
from nose.plugins.skip import SkipTest
raise SkipTest('Testing of PyOpenSSL disabled')
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
<commit_msg>Disable PyOpenSSL tests by default.<commit_after>
|
from nose.plugins.skip import SkipTest
from urllib3.packages import six
if six.PY3:
raise SkipTest('Testing of PyOpenSSL disabled on PY3')
try:
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
except ImportError as e:
raise SkipTest('Could not import PyOpenSSL: %r' % e)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
|
from urllib3.packages import six
if six.PY3:
from nose.plugins.skip import SkipTest
raise SkipTest('Testing of PyOpenSSL disabled')
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
Disable PyOpenSSL tests by default.from nose.plugins.skip import SkipTest
from urllib3.packages import six
if six.PY3:
raise SkipTest('Testing of PyOpenSSL disabled on PY3')
try:
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
except ImportError as e:
raise SkipTest('Could not import PyOpenSSL: %r' % e)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
|
<commit_before>from urllib3.packages import six
if six.PY3:
from nose.plugins.skip import SkipTest
raise SkipTest('Testing of PyOpenSSL disabled')
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
<commit_msg>Disable PyOpenSSL tests by default.<commit_after>from nose.plugins.skip import SkipTest
from urllib3.packages import six
if six.PY3:
raise SkipTest('Testing of PyOpenSSL disabled on PY3')
try:
from urllib3.contrib.pyopenssl import (inject_into_urllib3,
extract_from_urllib3)
except ImportError as e:
raise SkipTest('Could not import PyOpenSSL: %r' % e)
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1
from ..with_dummyserver.test_socketlevel import TestSNI, TestSocketClosing
def setup_module():
inject_into_urllib3()
def teardown_module():
extract_from_urllib3()
|
edd716204f1fc3337d46b74ed5708d5d0533f586
|
km3pipe/__init__.py
|
km3pipe/__init__.py
|
# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
|
# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
import os
mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle'
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
|
Use better name for matplotlib style
|
Use better name for matplotlib style
|
Python
|
mit
|
tamasgal/km3pipe,tamasgal/km3pipe
|
# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
Use better name for matplotlib style
|
# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
import os
mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle'
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
|
<commit_before># coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
<commit_msg>Use better name for matplotlib style<commit_after>
|
# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
import os
mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle'
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
|
# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
Use better name for matplotlib style# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
import os
mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle'
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
|
<commit_before># coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
<commit_msg>Use better name for matplotlib style<commit_after># coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from km3pipe.__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from km3pipe import io # noqa
from km3pipe import utils # noqa
from km3pipe import srv # noqa
from km3pipe.srv import srv_event # noqa
from km3pipe.io import GenericPump, read_hdf5 # noqa
import os
mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle'
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.