| column | stats |
|---|---|
| commit | stringlengths 40–40 |
| old_file | stringlengths 4–118 |
| new_file | stringlengths 4–118 |
| old_contents | stringlengths 0–2.94k |
| new_contents | stringlengths 1–4.43k |
| subject | stringlengths 15–444 |
| message | stringlengths 16–3.45k |
| lang | stringclasses 1 |
| license | stringclasses 13 |
| repos | stringlengths 5–43.2k |
| prompt | stringlengths 17–4.58k |
| response | stringlengths 1–4.43k |
| prompt_tagged | stringlengths 58–4.62k |
| response_tagged | stringlengths 1–4.43k |
| text | stringlengths 132–7.29k |
| text_tagged | stringlengths 173–7.33k |
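The last six columns are derived from the core fields rather than carrying independent content: `prompt` is `old_contents` with `message` appended, `response` and `response_tagged` repeat `new_contents`, `text` concatenates all three, and the `*_tagged` variants wrap the pieces in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. A minimal sketch of that composition follows, as a hypothetical `derive_columns` helper; the exact whitespace joining the parts is an assumption, since the flattened preview does not preserve it.

```python
def derive_columns(row: dict) -> dict:
    """Recompose the six derived columns from a row's core fields (sketch)."""
    before = row["old_contents"]  # file contents before the commit
    msg = row["message"]          # commit message
    after = row["new_contents"]   # file contents after the commit
    # Tagged form shared by prompt_tagged and text_tagged.
    tagged = f"<commit_before>{before}<commit_msg>{msg}<commit_after>"
    return {
        "prompt": before + msg,        # old contents with the message appended
        "response": after,             # new contents verbatim
        "prompt_tagged": tagged,
        "response_tagged": after,      # also the new contents verbatim
        "text": before + msg + after,  # all three core fields concatenated
        "text_tagged": tagged + after,
    }
```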
commit: 9442338d329e7f87d6265e0b0d3728a0df18d945
old_file: viper/lexer/reserved_tokens.py
new_file: viper/lexer/reserved_tokens.py
old_contents:
RESERVED_NAMES = {
'def',
'pass',
'return',
'class',
'interface',
'data',
'if',
'elif',
'else',
'or',
'and',
'not',
'for',
'true',
'false',
}
RESERVED_CLASSES = set()
new_contents:
RESERVED_NAMES = {
'def',
'pass',
'return',
'class',
'interface',
'data',
'static',
'public',
'private',
'protected',
'module',
'if',
'elif',
'else',
'or',
'and',
'not',
'for',
'true',
'false',
}
RESERVED_CLASSES = set()
subject: Update list of reserved tokens
message: Update list of reserved tokens
lang: Python
license: apache-2.0
repos: pdarragh/Viper

commit: c0ad52072dfe3ae489875c36a3a84561b43583a6
old_file: devtools/travis-ci/set_doc_version.py
new_file: devtools/travis-ci/set_doc_version.py
old_contents:
import os
import shutil
from yank import version
if version.release:
docversion = version.version
else:
docversion = 'latest'
os.mkdir("docs/_deploy")
shutil.copytree("docs/_build/html", "docs/_deploy/{docversion}"
.format(docversion=docversion))
new_contents:
import os
import shutil
from yank import version
if version.release:
docversion = version.version
else:
docversion = 'latest'
os.mkdir("docs/_deploy")
shutil.copytree("docs/_build", "docs/_deploy/{docversion}"
.format(docversion=docversion))
subject: Set the correct doc build dir to copy
message: Set the correct doc build dir to copy
lang: Python
license: mit
repos: andrrizzi/yank,choderalab/yank,choderalab/yank,andrrizzi/yank,andrrizzi/yank

commit: 53fb42f275050986072060a550e4fee09ab418f6
old_file: wagtail/wagtailadmin/checks.py
new_file: wagtail/wagtailadmin/checks.py
old_contents:
import os
from django.core.checks import Error, register
@register()
def css_install_check(app_configs, **kwargs):
errors = []
css_path = os.path.join(
os.path.dirname(__file__), 'static', 'wagtailadmin', 'css', 'normalize.css'
)
if not os.path.isfile(css_path):
error_hint = """
Most likely you are running a development (non-packaged) copy of
Wagtail and have not built the static assets -
see http://docs.wagtail.io/en/latest/contributing/developing.html
File not found: %s
""" % css_path
errors.append(
Error(
"CSS for the Wagtail admin is missing",
hint=error_hint,
id='wagtailadmin.E001',
)
)
return errors
new_contents:
import os
from django.core.checks import Warning, register
@register()
def css_install_check(app_configs, **kwargs):
errors = []
css_path = os.path.join(
os.path.dirname(__file__), 'static', 'wagtailadmin', 'css', 'normalize.css'
)
if not os.path.isfile(css_path):
error_hint = """
Most likely you are running a development (non-packaged) copy of
Wagtail and have not built the static assets -
see http://docs.wagtail.io/en/latest/contributing/developing.html
File not found: %s
""" % css_path
errors.append(
Warning(
"CSS for the Wagtail admin is missing",
hint=error_hint,
id='wagtailadmin.W001',
)
)
return errors
subject: Throw warning on missing CSS rather than error, so that tests can still run on Dj1.7
message: Throw warning on missing CSS rather than error, so that tests can still run on Dj1.7
lang: Python
license: bsd-3-clause
repos:
nilnvoid/wagtail,gasman/wagtail,wagtail/wagtail,nutztherookie/wagtail,thenewguy/wagtail,rsalmaso/wagtail,kurtw/wagtail,nilnvoid/wagtail,nutztherookie/wagtail,iansprice/wagtail,thenewguy/wagtail,nimasmi/wagtail,nealtodd/wagtail,Toshakins/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,iansprice/wagtail,zerolab/wagtail,mikedingjan/wagtail,timorieber/wagtail,jnns/wagtail,davecranwell/wagtail,rsalmaso/wagtail,chrxr/wagtail,hamsterbacke23/wagtail,jnns/wagtail,kurtrwall/wagtail,kurtw/wagtail,mixxorz/wagtail,rsalmaso/wagtail,davecranwell/wagtail,kaedroho/wagtail,wagtail/wagtail,kaedroho/wagtail,gasman/wagtail,hamsterbacke23/wagtail,kaedroho/wagtail,rsalmaso/wagtail,iansprice/wagtail,gasman/wagtail,nealtodd/wagtail,inonit/wagtail,hamsterbacke23/wagtail,nealtodd/wagtail,mixxorz/wagtail,mixxorz/wagtail,chrxr/wagtail,iansprice/wagtail,zerolab/wagtail,mikedingjan/wagtail,davecranwell/wagtail,gogobook/wagtail,quru/wagtail,quru/wagtail,takeflight/wagtail,timorieber/wagtail,thenewguy/wagtail,nimasmi/wagtail,wagtail/wagtail,chrxr/wagtail,FlipperPA/wagtail,nimasmi/wagtail,gogobook/wagtail,inonit/wagtail,gogobook/wagtail,nutztherookie/wagtail,kurtrwall/wagtail,nealtodd/wagtail,kurtrwall/wagtail,gasman/wagtail,torchbox/wagtail,Toshakins/wagtail,thenewguy/wagtail,takeflight/wagtail,mikedingjan/wagtail,timorieber/wagtail,thenewguy/wagtail,wagtail/wagtail,mikedingjan/wagtail,torchbox/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,jnns/wagtail,jnns/wagtail,quru/wagtail,FlipperPA/wagtail,quru/wagtail,gasman/wagtail,torchbox/wagtail,takeflight/wagtail,kaedroho/wagtail,kaedroho/wagtail,takeflight/wagtail,nimasmi/wagtail,kurtw/wagtail,chrxr/wagtail,nilnvoid/wagtail,inonit/wagtail,gogobook/wagtail,Toshakins/wagtail,nutztherookie/wagtail,JoshBarr/wagtail,zerolab/wagtail,hamsterbacke23/wagtail,JoshBarr/wagtail,davecranwell/wagtail,zerolab/wagtail,Toshakins/wagtail,inonit/wagtail,mixxorz/wagtail,timorieber/wagtail,rsalmaso/wagtail,kurtw/wagtail,mixxorz/wagtail,JoshBarr/wagtail,zerolab/wagtail,torchbox/wagtail,wagtail/wagtail,JoshBarr/wagtail

commit: ae4a13da2857a5826fa701f25b242c95d56995d9
old_file: string_length.py
new_file: string_length.py
old_contents:
#!/usr/bin/env python2
#encoding: UTF-8
# Define a function that computes the length of a given list or string.
# (It is true that Python has the len() function built in,
# but writing it yourself is nevertheless a good exercise.)
def string_length(string):
if string == None:
return False
if type(string) is not str and type(string) is not list:
return False
value = 0
for i in string:
value = value + 1
return value
def main():
get_input = raw_input("Enter a value: ")
print string_length(get_input)
if __name__ == "__main__":
main()
new_contents:
#!/usr/bin/env python2
#encoding: UTF-8
# Define a function that computes the length of a given list or string.
# (It is true that Python has the len() function built in,
# but writing it yourself is nevertheless a good exercise.)
def string_length(string):
if string == None:
return False
if type(string) is not str and type(string) is not list:
return False
value = 0
new_string = []
if '[' in string and ']' in string:
for i in string:
if i != '[' and i != ']' and i != ',':
new_string.append(i)
string = new_string
for i in string:
value = value + 1
return value
def main():
get_input = raw_input("Enter a value: ")
print string_length(get_input)
if __name__ == "__main__":
main()
subject: Add support for list input
message: Add support for list input
lang: Python
license: mit
repos: giantas/minor-python-tests

commit: c45881530694488c5ef139e89c05aa24ddb671ff
old_file: djlint/analyzers/context_processors.py
new_file: djlint/analyzers/context_processors.py
old_contents:
import ast
from .base import BaseAnalyzer, ModuleVisitor, Result
class ContextProcessorsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.core.context_processors.auth':
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.PermWrapper':
'django.contrib.auth.context_processors.PermWrapper',
'django.core.context_processors.PermLookupDict':
'django.contrib.auth.context_processors.PermLookupDict',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class ContextProcessorsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = ContextProcessorsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r function is removed in Django >=1.4, use %r instead'
% (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
new_contents:
import ast
from .base import BaseAnalyzer, ModuleVisitor, Result
class ContextProcessorsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.core.context_processors.auth':
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.PermWrapper':
'django.contrib.auth.context_processors.PermWrapper',
'django.core.context_processors.PermLookupDict':
'django.contrib.auth.context_processors.PermLookupDict',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class ContextProcessorsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = ContextProcessorsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'As of Django 1.4, %r function has beed deprecated and '
'will be removed in Django 1.5. Use %r instead.'
% (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
subject: Fix context processors analyzer: deprecated processors will be removed in Django 1.5
message: Fix context processors analyzer: deprecated processors will be removed in Django 1.5
lang: Python
license: isc
repos: alfredhq/djlint

commit: b55e58f3619305946decc7c17c558879895f3b1a
old_file: tests/request_methods/test_sellers.py
new_file: tests/request_methods/test_sellers.py
old_contents:
"""
Tests for the MWS.Sellers API class.
"""
import unittest
import mws
from .utils import CommonRequestTestTools
class SellersTestCase(unittest.TestCase, CommonRequestTestTools):
"""
Test cases for Sellers.
"""
# TODO: Add remaining methods for Sellers
def setUp(self):
self.api = mws.Sellers(
self.CREDENTIAL_ACCESS,
self.CREDENTIAL_SECRET,
self.CREDENTIAL_ACCOUNT,
auth_token=self.CREDENTIAL_TOKEN
)
self.api._test_request_params = True
new_contents:
"""
Tests for the Sellers API class.
"""
import unittest
import mws
from .utils import CommonRequestTestTools
class SellersTestCase(unittest.TestCase, CommonRequestTestTools):
"""
Test cases for Sellers.
"""
# TODO: Add remaining methods for Sellers
def setUp(self):
self.api = mws.Sellers(
self.CREDENTIAL_ACCESS,
self.CREDENTIAL_SECRET,
self.CREDENTIAL_ACCOUNT,
auth_token=self.CREDENTIAL_TOKEN
)
self.api._test_request_params = True
def test_list_marketplace_participations(self):
"""
ListMarketplaceParticipations operation
"""
params = self.api.list_marketplace_participations()
self.assert_common_params(params)
assert params['Action'] == 'ListMarketplaceParticipations'
def test_list_marketplace_participations_by_next_token(self):
"""
ListMarketplaceParticipationsByNextToken operation, by way of method decorator.
"""
next_token = 'token_foobar'
params = self.api.list_marketplace_participations(next_token=next_token)
self.assert_common_params(params)
assert params['Action'] == 'ListMarketplaceParticipationsByNextToken'
assert params['NextToken'] == next_token
def test_list_marketplace_participations_by_next_token_alias(self):
"""
ListMarketplaceParticipationsByNextToken operation, by way of alias method.
"""
next_token = 'token_foobar'
params = self.api.list_marketplace_participations_by_next_token(next_token)
self.assert_common_params(params)
assert params['Action'] == 'ListMarketplaceParticipationsByNextToken'
assert params['NextToken'] == next_token
subject: Expand tests for Sellers API
message: Expand tests for Sellers API
lang: Python
license: unlicense
repos: Bobspadger/python-amazon-mws,GriceTurrble/python-amazon-mws

commit: 83a086b865c2db791a208d3854c15963ed3fc693
old_file: plum/tests/service_test.py
new_file: plum/tests/service_test.py
old_contents:
from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers():
self.client.kill(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
new_contents:
from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers(all=True):
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
subject: Delete all containers on the Docker daemon before running test
message: Delete all containers on the Docker daemon before running test
lang: Python
license: apache-2.0
repos:
phiroict/docker,xydinesh/compose,unodba/compose,shin-/docker.github.io,bdwill/docker.github.io,bsmr-docker/compose,KevinGreene/compose,Katlean/fig,screwgoth/compose,bobphill/compose,aanand/fig,zhangspook/compose,GM-Alex/compose,KevinGreene/compose,saada/compose,docker-zh/docker.github.io,ekristen/compose,jrabbit/compose,danix800/docker.github.io,bdwill/docker.github.io,vlajos/compose,BSWANG/denverdino.github.io,KalleDK/compose,ain/compose,kikkomep/compose,jeanpralo/compose,calou/compose,londoncalling/docker.github.io,danix800/docker.github.io,dnephin/compose,alexisbellido/docker.github.io,lmesz/compose,heroku/fig,docker-zh/docker.github.io,feelobot/compose,dopry/compose,viranch/compose,joeuo/docker.github.io,tpounds/compose,heroku/fig,philwrenn/compose,swoopla/compose,GM-Alex/compose,alexisbellido/docker.github.io,JimGalasyn/docker.github.io,MSakamaki/compose,docker-zh/docker.github.io,bbirand/compose,sanscontext/docker.github.io,lmesz/compose,menglingwei/denverdino.github.io,johnstep/docker.github.io,iamluc/compose,xydinesh/compose,mohitsoni/compose,alexandrev/compose,ralphtheninja/compose,noironetworks/compose,VinceBarresi/compose,genki/compose,TomasTomecek/compose,BSWANG/denverdino.github.io,twitherspoon/compose,thaJeztah/docker.github.io,hoogenm/compose,michael-k/docker-compose,bdwill/docker.github.io,kojiromike/compose,denverdino/denverdino.github.io,mrfuxi/compose,lukemarsden/compose,goloveychuk/compose,joaofnfernandes/docker.github.io,docker/docker.github.io,josephpage/compose,andrewgee/compose,albers/compose,abesto/fig,mdaue/compose,runcom/compose,marcusmartins/compose,mark-adams/compose,mrfuxi/compose,twitherspoon/compose,charleswhchan/compose,thieman/compose,docker-zh/docker.github.io,Katlean/fig,joaofnfernandes/docker.github.io,phiroict/docker,anweiss/docker.github.io,LuisBosquez/docker.github.io,shubheksha/docker.github.io,thaJeztah/docker.github.io,ouziel-slama/compose,jonaseck2/compose,aduermael/docker.github.io,shakamunyi/fig,vdemeester/compose,brunocascio/compose,sanscontext/docker.github.io,denverdino/denverdino.github.io,mohitsoni/compose,bdwill/docker.github.io,talolard/compose,qzio/compose,gtrdotmcs/compose,dilgerma/compose,thaJeztah/docker.github.io,rgbkrk/compose,Yelp/docker-compose,jzwlqx/denverdino.github.io,tangkun75/compose,docker-zh/docker.github.io,LuisBosquez/docker.github.io,alexisbellido/docker.github.io,vlajos/compose,LuisBosquez/docker.github.io,thaJeztah/docker.github.io,thaJeztah/compose,rgbkrk/compose,shubheksha/docker.github.io,shin-/compose,j-fuentes/compose,d2bit/compose,talolard/compose,d2bit/compose,menglingwei/denverdino.github.io,shin-/docker.github.io,mdaue/compose,brunocascio/compose,aanand/fig,DoubleMalt/compose,rillig/docker.github.io,anweiss/docker.github.io,dilgerma/compose,mark-adams/compose,ain/compose,bcicen/fig,dockerhn/compose,gtrdotmcs/compose,denverdino/compose,anweiss/docker.github.io,cclauss/compose,jonaseck2/compose,mchasal/compose,schmunk42/compose,uvgroovy/compose,troy0820/docker.github.io,mchasal/compose,TomasTomecek/compose,BSWANG/denverdino.github.io,hypriot/compose,bfirsh/fig,jorgeLuizChaves/compose,ChrisChinchilla/compose,menglingwei/denverdino.github.io,sdurrheimer/compose,ouziel-slama/compose,alunduil/fig,sebglazebrook/compose,cgvarela/compose,jgrowl/compose,tiry/compose,JimGalasyn/docker.github.io,VinceBarresi/compose,MSakamaki/compose,ionrock/compose,londoncalling/docker.github.io,ChrisChinchilla/compose,tiry/compose,LuisBosquez/docker.github.io,rillig/docker.github.io,johnstep/docker.github.io,jessekl/compose,gdevillele/docker.github.io,docker/docker.github.io,jiekechoo/compose,troy0820/docker.github.io,londoncalling/docker.github.io,thaJeztah/compose,pspierce/compose,hypriot/compose,phiroict/docker,vdemeester/compose,thaJeztah/docker.github.io,Yelp/docker-compose,pspierce/compose,ZJaffee/compose,danix800/docker.github.io,ralphtheninja/compose,shubheksha/docker.github.io,shubheksha/docker.github.io,ph-One/compose,uvgroovy/compose,BSWANG/denverdino.github.io,JimGalasyn/docker.github.io,jzwlqx/denverdino.github.io,jessekl/compose,qzio/compose,artemkaint/compose,docker/docker.github.io,joeuo/docker.github.io,browning/compose,troy0820/docker.github.io,bfirsh/fig,bsmr-docker/compose,au-phiware/compose,mbailey/compose,johnstep/docker.github.io,browning/compose,sanscontext/docker.github.io,abesto/fig,bdwill/docker.github.io,dopry/compose,ZJaffee/compose,dbdd4us/compose,mosquito/docker-compose,Chouser/compose,gdevillele/docker.github.io,Chouser/compose,joaofnfernandes/docker.github.io,nerro/compose,gdevillele/docker.github.io,j-fuentes/compose,denverdino/docker.github.io,dockerhn/compose,rstacruz/compose,cgvarela/compose,shin-/docker.github.io,funkyfuture/docker-compose,KalleDK/compose,anweiss/docker.github.io,mbailey/compose,johnstep/docker.github.io,dnephin/compose,denverdino/docker.github.io,mindaugasrukas/compose,simonista/compose,kojiromike/compose,bcicen/fig,benhamill/compose,londoncalling/docker.github.io,bobphill/compose,mnuessler/compose,ph-One/compose,danix800/docker.github.io,jiekechoo/compose,jzwlqx/denverdino.github.io,charleswhchan/compose,anweiss/docker.github.io,ionrock/compose,alexandrev/compose,mnuessler/compose,mnowster/compose,JimGalasyn/docker.github.io,RobertNorthard/compose,kikkomep/compose,simonista/compose,joeuo/docker.github.io,artemkaint/compose,zhangspook/compose,moxiegirl/compose,mindaugasrukas/compose,runcom/compose,sanscontext/docker.github.io,rillig/docker.github.io,alunduil/fig,swoopla/compose,prologic/compose,joeuo/docker.github.io,nhumrich/compose,TheDataShed/compose,aduermael/docker.github.io,sdurrheimer/compose,moxiegirl/compose,docker/docker.github.io,tpounds/compose,rillig/docker.github.io,londoncalling/docker.github.io,aduermael/docker.github.io,unodba/compose,dbdd4us/compose,feelobot/compose,mnowster/compose,Dakno/compose,nhumrich/compose,JimGalasyn/docker.github.io,gdevillele/docker.github.io,shubheksha/docker.github.io,shin-/compose,screwgoth/compose,sebglazebrook/compose,denverdino/denverdino.github.io,ggtools/compose,rstacruz/compose,bbirand/compose,viranch/compose,bcicen/fig,glogiotatidis/compose,troy0820/docker.github.io,TheDataShed/compose,gdevillele/docker.github.io,shin-/docker.github.io,glogiotatidis/compose,thieman/compose,funkyfuture/docker-compose,jzwlqx/denverdino.github.io,mosquito/docker-compose,albers/compose,jeanpralo/compose,RobertNorthard/compose,amitsaha/compose,prologic/compose,BSWANG/denverdino.github.io,amitsaha/compose,DoubleMalt/compose,au-phiware/compose,denverdino/docker.github.io,Dakno/compose,LuisBosquez/docker.github.io,phiroict/docker,philwrenn/compose,genki/compose,shakamunyi/fig,denverdino/denverdino.github.io,marcusmartins/compose,ekristen/compose,iamluc/compose,menglingwei/denverdino.github.io,lukemarsden/compose,sanscontext/docker.github.io,docker/docker.github.io,schmunk42/compose,michael-k/docker-compose,josephpage/compose,alexisbellido/docker.github.io,johnstep/docker.github.io,aduermael/docker.github.io,shin-/docker.github.io,joeuo/docker.github.io,hoogenm/compose,saada/compose,denverdino/denverdino.github.io,heroku/fig,noironetworks/compose,jorgeLuizChaves/compose,calou/compose,tangkun75/compose,nerro/compose,cclauss/compose,joaofnfernandes/docker.github.io,jgrowl/compose,denverdino/compose,alexisbellido/docker.github.io,goloveychuk/compose,ggtools/compose,jrabbit/compose,denverdino/docker.github.io,andrewgee/compose
|
from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers():
self.client.kill(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
Delete all containers on the Docker daemon before running test
|
from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers(all=True):
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
|
<commit_before>from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers():
self.client.kill(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
<commit_msg>Delete all containers on the Docker daemon before running test<commit_after>
|
from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers(all=True):
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
|
from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers():
self.client.kill(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
Delete all containers on the Docker daemon before running test
from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers(all=True):
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
|
<commit_before>from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers():
self.client.kill(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
<commit_msg>Delete all containers on the Docker daemon before running test<commit_after>from unittest import TestCase
from docker import Client
from plum import Service
class ServiceTestCase(TestCase):
def setUp(self):
self.client = Client('http://127.0.0.1:4243')
self.client.pull('ubuntu')
for c in self.client.containers(all=True):
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
self.service = Service(
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
)
def test_up_scale_down(self):
self.assertEqual(len(self.service.containers), 0)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.start()
self.assertEqual(len(self.service.containers), 1)
self.service.scale(2)
self.assertEqual(len(self.service.containers), 2)
self.service.scale(1)
self.assertEqual(len(self.service.containers), 1)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
self.service.stop()
self.assertEqual(len(self.service.containers), 0)
|
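A note on the record above: it switches the test fixture from killing only running containers to killing and removing every container the daemon knows about. As a rough sketch of that cleanup pattern against the same legacy docker-py Client API, with the caveat that kill() fails on containers that are already stopped (the helper name and the broad except are illustrative, not part of the commit):

from docker import Client

def remove_all_containers(client):
    # containers(all=True) also lists stopped containers, which kill()
    # rejects, so tolerate that failure and always remove afterwards.
    for container in client.containers(all=True):
        try:
            client.kill(container['Id'])
        except Exception:
            pass  # container was not running
        client.remove_container(container['Id'])

if __name__ == '__main__':
    remove_all_containers(Client('http://127.0.0.1:4243'))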
83a5688181ed3fac058cd1b9b15f885e47578409
|
testsuite/E20.py
|
testsuite/E20.py
|
#: E201
spam( ham[1], {eggs: 2})
#: E201
spam(ham[ 1], {eggs: 2})
#: E201
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202
spam(ham[1], {eggs: 2} )
#: E202
spam(ham[1], {eggs: 2 })
#: E202
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203
if x == 4 :
print x, y
x, y = y, x
#: E203 E702
if x == 4:
print x, y ; x, y = y, x
#: E203
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
|
#: E201:1:6
spam( ham[1], {eggs: 2})
#: E201:1:10
spam(ham[ 1], {eggs: 2})
#: E201:1:15
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202:1:23
spam(ham[1], {eggs: 2} )
#: E202:1:22
spam(ham[1], {eggs: 2 })
#: E202:1:11
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203:1:10
if x == 4 :
print x, y
x, y = y, x
#: E203:2:15 E702:2:16
if x == 4:
print x, y ; x, y = y, x
#: E203:3:13
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
|
Add some tests with row and column
|
Add some tests with row and column
|
Python
|
mit
|
jayvdb/pep8,pedros/pep8,fabioz/pep8,MeteorAdminz/pep8,ojengwa/pep8,PyCQA/pep8,codeclimate/pep8,reinout/pep8,ABaldwinHunter/pep8,jayvdb/pep8,reinout/pep8,asandyz/pep8,ABaldwinHunter/pep8-clone-classic,pandeesh/pep8,doismellburning/pep8,fabioz/pep8,zevnux/pep8
|
#: E201
spam( ham[1], {eggs: 2})
#: E201
spam(ham[ 1], {eggs: 2})
#: E201
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202
spam(ham[1], {eggs: 2} )
#: E202
spam(ham[1], {eggs: 2 })
#: E202
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203
if x == 4 :
print x, y
x, y = y, x
#: E203 E702
if x == 4:
print x, y ; x, y = y, x
#: E203
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
Add some tests with row and column
|
#: E201:1:6
spam( ham[1], {eggs: 2})
#: E201:1:10
spam(ham[ 1], {eggs: 2})
#: E201:1:15
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202:1:23
spam(ham[1], {eggs: 2} )
#: E202:1:22
spam(ham[1], {eggs: 2 })
#: E202:1:11
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203:1:10
if x == 4 :
print x, y
x, y = y, x
#: E203:2:15 E702:2:16
if x == 4:
print x, y ; x, y = y, x
#: E203:3:13
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
|
<commit_before>#: E201
spam( ham[1], {eggs: 2})
#: E201
spam(ham[ 1], {eggs: 2})
#: E201
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202
spam(ham[1], {eggs: 2} )
#: E202
spam(ham[1], {eggs: 2 })
#: E202
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203
if x == 4 :
print x, y
x, y = y, x
#: E203 E702
if x == 4:
print x, y ; x, y = y, x
#: E203
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
<commit_msg>Add some tests with row and column<commit_after>
|
#: E201:1:6
spam( ham[1], {eggs: 2})
#: E201:1:10
spam(ham[ 1], {eggs: 2})
#: E201:1:15
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202:1:23
spam(ham[1], {eggs: 2} )
#: E202:1:22
spam(ham[1], {eggs: 2 })
#: E202:1:11
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203:1:10
if x == 4 :
print x, y
x, y = y, x
#: E203:2:15 E702:2:16
if x == 4:
print x, y ; x, y = y, x
#: E203:3:13
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
|
#: E201
spam( ham[1], {eggs: 2})
#: E201
spam(ham[ 1], {eggs: 2})
#: E201
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202
spam(ham[1], {eggs: 2} )
#: E202
spam(ham[1], {eggs: 2 })
#: E202
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203
if x == 4 :
print x, y
x, y = y, x
#: E203 E702
if x == 4:
print x, y ; x, y = y, x
#: E203
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
Add some tests with row and column
#: E201:1:6
spam( ham[1], {eggs: 2})
#: E201:1:10
spam(ham[ 1], {eggs: 2})
#: E201:1:15
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202:1:23
spam(ham[1], {eggs: 2} )
#: E202:1:22
spam(ham[1], {eggs: 2 })
#: E202:1:11
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203:1:10
if x == 4 :
print x, y
x, y = y, x
#: E203:2:15 E702:2:16
if x == 4:
print x, y ; x, y = y, x
#: E203:3:13
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
|
<commit_before>#: E201
spam( ham[1], {eggs: 2})
#: E201
spam(ham[ 1], {eggs: 2})
#: E201
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202
spam(ham[1], {eggs: 2} )
#: E202
spam(ham[1], {eggs: 2 })
#: E202
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203
if x == 4 :
print x, y
x, y = y, x
#: E203 E702
if x == 4:
print x, y ; x, y = y, x
#: E203
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
<commit_msg>Add some tests with row and column<commit_after>#: E201:1:6
spam( ham[1], {eggs: 2})
#: E201:1:10
spam(ham[ 1], {eggs: 2})
#: E201:1:15
spam(ham[1], { eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
#:
#: E202:1:23
spam(ham[1], {eggs: 2} )
#: E202:1:22
spam(ham[1], {eggs: 2 })
#: E202:1:11
spam(ham[1 ], {eggs: 2})
#: Okay
spam(ham[1], {eggs: 2})
result = func(
arg1='some value',
arg2='another value',
)
result = func(
arg1='some value',
arg2='another value'
)
result = [
item for item in items
if item > 5
]
#:
#: E203:1:10
if x == 4 :
print x, y
x, y = y, x
#: E203:2:15 E702:2:16
if x == 4:
print x, y ; x, y = y, x
#: E203:3:13
if x == 4:
print x, y
x, y = y , x
#: Okay
if x == 4:
print x, y
x, y = y, x
a[b1, :] == a[b1, ...]
b = a[:, b1]
#:
|
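A note on the record above: it upgrades the testsuite markers from bare codes ('#: E201') to code:row:column triples ('#: E201:1:6'). pep8's own test runner is what consumes these markers; purely to illustrate the convention, a minimal stand-in parser might look like the following (the function name and tuple layout are assumptions, not pep8 internals):

def parse_expected(line):
    # '#: E203:2:15 E702:2:16' -> [('E203', 2, 15), ('E702', 2, 16)];
    # row and column stay None for bare codes such as '#: E201'.
    line = line.strip()
    if not line.startswith('#:'):
        return []
    expected = []
    for token in line[2:].split():
        parts = token.split(':')
        row = int(parts[1]) if len(parts) > 1 else None
        col = int(parts[2]) if len(parts) > 2 else None
        expected.append((parts[0], row, col))
    return expected

assert parse_expected('#: E203:2:15 E702:2:16') == [('E203', 2, 15), ('E702', 2, 16)]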
3c86abb5d2a728604b97a33c3f989039231205b0
|
ooni/tests/test_utils.py
|
ooni/tests/test_utils.py
|
import unittest
from ooni.utils import pushFilenameStack
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open("dummyfile", "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open("dummyfile.%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack("dummyfile")
for i in xrange(1, 5):
f = open("dummyfile.%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
|
import os
import unittest
from ooni.utils import pushFilenameStack
basefilename = os.path.abspath('dummyfile')
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open(basefilename, "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open(basefilename+".%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack(basefilename)
for i in xrange(1, 5):
f = open(basefilename+".%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
|
Use absolute filepath instead of relative
|
Use absolute filepath instead of relative
|
Python
|
bsd-2-clause
|
lordappsec/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe
|
import unittest
from ooni.utils import pushFilenameStack
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open("dummyfile", "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open("dummyfile.%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack("dummyfile")
for i in xrange(1, 5):
f = open("dummyfile.%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
Use absolute filepath instead of relative
|
import os
import unittest
from ooni.utils import pushFilenameStack
basefilename = os.path.abspath('dummyfile')
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open(basefilename, "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open(basefilename+".%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack(basefilename)
for i in xrange(1, 5):
f = open(basefilename+".%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
|
<commit_before>import unittest
from ooni.utils import pushFilenameStack
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open("dummyfile", "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open("dummyfile.%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack("dummyfile")
for i in xrange(1, 5):
f = open("dummyfile.%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
<commit_msg>Use absolute filepath instead of relative<commit_after>
|
import os
import unittest
from ooni.utils import pushFilenameStack
basefilename = os.path.abspath('dummyfile')
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open(basefilename, "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open(basefilename+".%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack(basefilename)
for i in xrange(1, 5):
f = open(basefilename+".%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
|
import unittest
from ooni.utils import pushFilenameStack
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open("dummyfile", "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open("dummyfile.%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack("dummyfile")
for i in xrange(1, 5):
f = open("dummyfile.%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
Use absolute filepath instead of relative
import os
import unittest
from ooni.utils import pushFilenameStack
basefilename = os.path.abspath('dummyfile')
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open(basefilename, "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open(basefilename+".%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack(basefilename)
for i in xrange(1, 5):
f = open(basefilename+".%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
|
<commit_before>import unittest
from ooni.utils import pushFilenameStack
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open("dummyfile", "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open("dummyfile.%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack("dummyfile")
for i in xrange(1, 5):
f = open("dummyfile.%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
<commit_msg>Use absolute filepath instead of relative<commit_after>import os
import unittest
from ooni.utils import pushFilenameStack
basefilename = os.path.abspath('dummyfile')
class TestUtils(unittest.TestCase):
def test_pushFilenameStack(self):
f = open(basefilename, "w+")
f.write("0\n")
f.close()
for i in xrange(1, 5):
f = open(basefilename+".%s" % i, "w+")
f.write("%s\n" % i)
f.close()
pushFilenameStack(basefilename)
for i in xrange(1, 5):
f = open(basefilename+".%s" % i)
c = f.readlines()[0].strip()
self.assertEqual(str(i-1), str(c))
f.close()
|
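A note on the record above: the failure mode it guards against is that a relative filename re-resolves against whatever the current working directory happens to be when the test runs, while os.path.abspath pins it once. A small sketch (the printed paths are examples only):

import os

relative = 'dummyfile'
absolute = os.path.abspath(relative)   # resolved once, stays fixed
print(absolute)                        # e.g. /home/user/project/dummyfile
os.chdir('/tmp')
print(os.path.abspath(relative))       # now /tmp/dummyfile, a different file
print(absolute)                        # still the original location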
6aee8af90e5c09fb56d5d9194f0d0db5ce8b38f8
|
tmpl/__init__.py
|
tmpl/__init__.py
|
"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
|
"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
import Platform
import Prompt
|
Make a new entry to import all modules.
|
Make a new entry to import all modules.
|
Python
|
mit
|
nday-dev/Spider-Framework
|
"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
Make a new entry to import all modules.
|
"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
import Platform
import Prompt
|
<commit_before>"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
<commit_msg>Make a new entry to import all modules.<commit_after>
|
"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
import Platform
import Prompt
|
"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
Make a new entry to import all modules.
"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
import Platform
import Prompt
|
<commit_before>"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
<commit_msg>Make a new entry to import all modules.<commit_after>"""
All class templates which can be derived from.
List:
Platform
Prompt
"""
import Platform
import Prompt
|
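A note on the record above: `import Platform` inside a package relies on Python 2 implicit relative imports. Purely as a sketch, a Python 3 version of the same tmpl/__init__.py would need explicit relative imports; the __all__ line is an optional extra, not part of the commit:

"""
All class templates which can be derived from.
List:
    Platform
    Prompt
"""
from . import Platform
from . import Prompt

__all__ = ['Platform', 'Prompt']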
b013e96cb1762f46f3281ac61b5e1b53e07ede18
|
prime-factors/prime_factors.py
|
prime-factors/prime_factors.py
|
import sieve
def prime_factors(n):
primes = sieve.sieve(n)
factors = []
for p in primes:
while n % p == 0:
factors += [p]
n //= p
return factors
|
def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
|
Fix memory issues by just trying every number
|
Fix memory issues by just trying every number
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
import sieve
def prime_factors(n):
primes = sieve.sieve(n)
factors = []
for p in primes:
while n % p == 0:
factors += [p]
n //= p
return factors
Fix memory issues by just trying every number
|
def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
|
<commit_before>import sieve
def prime_factors(n):
primes = sieve.sieve(n)
factors = []
for p in primes:
while n % p == 0:
factors += [p]
n //= p
return factors
<commit_msg>Fix memory issues by just trying every number<commit_after>
|
def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
|
import sieve
def prime_factors(n):
primes = sieve.sieve(n)
factors = []
for p in primes:
while n % p == 0:
factors += [p]
n //= p
return factors
Fix memory issues by just trying every number
def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
|
<commit_before>import sieve
def prime_factors(n):
primes = sieve.sieve(n)
factors = []
for p in primes:
while n % p == 0:
factors += [p]
n //= p
return factors
<commit_msg>Fix memory issues by just trying every number<commit_after>def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
|
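A note on the record above: dropping the sieve removes the memory cost of sieving up to n, but the new loop still walks the candidate factor all the way to n. A common refinement, sketched below, stops at sqrt(n) and appends whatever remains, which is then necessarily prime:

def prime_factors(n):
    factors = []
    factor = 2
    while factor * factor <= n:
        while n % factor == 0:
            factors.append(factor)
            n //= factor
        factor += 1
    if n > 1:
        factors.append(n)  # the leftover cofactor is prime
    return factors

assert prime_factors(600851475143) == [71, 839, 1471, 6857]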
df98c8bd70f25727810e6eb9d359cf1e14fd6645
|
update_prices.py
|
update_prices.py
|
import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
ITEMS = [
34, # Tritanium
35, # Pyerite
36, # Mexallon
37, # Isogen
38, # Nocxium
39, # Zydrine
40, # Megacyte
11399, # Morphite
]
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
url = MARKET_URL % ('&'.join('typeid=%s' % i for i in ITEMS))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
|
import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
# Get all items used in current BlueprintInstances
cur.execute("""
SELECT DISTINCT c.item_id
FROM blueprints_blueprintcomponent c
INNER JOIN blueprints_blueprintinstance AS bi
ON c.blueprint_id = bi.blueprint_id
""")
rows = cur.fetchall()
for i in range(0, len(rows), 20):
url = MARKET_URL % ('&'.join('typeid=%s' % item for item in rows[i:i+20]))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
|
Update prices for all BlueprintInstances we currently have
|
Update prices for all BlueprintInstances we currently have
|
Python
|
bsd-2-clause
|
madcowfred/evething,Gillingham/evething,cmptrgeekken/evething,madcowfred/evething,cmptrgeekken/evething,cmptrgeekken/evething,cmptrgeekken/evething,madcowfred/evething,Gillingham/evething,madcowfred/evething,cmptrgeekken/evething,Gillingham/evething,Gillingham/evething
|
import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
ITEMS = [
34, # Tritanium
35, # Pyerite
36, # Mexallon
37, # Isogen
38, # Nocxium
39, # Zydrine
40, # Megacyte
11399, # Morphite
]
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
url = MARKET_URL % ('&'.join('typeid=%s' % i for i in ITEMS))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
Update prices for all BlueprintInstances we currently have
|
import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
# Get all items used in current BlueprintInstances
cur.execute("""
SELECT DISTINCT c.item_id
FROM blueprints_blueprintcomponent c
INNER JOIN blueprints_blueprintinstance AS bi
ON c.blueprint_id = bi.blueprint_id
""")
rows = cur.fetchall()
for i in range(0, len(rows), 20):
url = MARKET_URL % ('&'.join('typeid=%s' % item for item in rows[i:i+20]))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
|
<commit_before>import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
ITEMS = [
34, # Tritanium
35, # Pyerite
36, # Mexallon
37, # Isogen
38, # Nocxium
39, # Zydrine
40, # Megacyte
11399, # Morphite
]
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
url = MARKET_URL % ('&'.join('typeid=%s' % i for i in ITEMS))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
<commit_msg>Update prices for all BlueprintInstances we currently have<commit_after>
|
import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
# Get all items used in current BlueprintInstances
cur.execute("""
SELECT DISTINCT c.item_id
FROM blueprints_blueprintcomponent c
INNER JOIN blueprints_blueprintinstance AS bi
ON c.blueprint_id = bi.blueprint_id
""")
rows = cur.fetchall()
for i in range(0, len(rows), 20):
url = MARKET_URL % ('&'.join('typeid=%s' % item for item in rows[i:i+20]))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
|
import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
ITEMS = [
34, # Tritanium
35, # Pyerite
36, # Mexallon
37, # Isogen
38, # Nocxium
39, # Zydrine
40, # Megacyte
11399, # Morphite
]
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
url = MARKET_URL % ('&'.join('typeid=%s' % i for i in ITEMS))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
Update prices for all BlueprintInstances we currently have
import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
# Get all items used in current BlueprintInstances
cur.execute("""
SELECT DISTINCT c.item_id
FROM blueprints_blueprintcomponent c
INNER JOIN blueprints_blueprintinstance AS bi
ON c.blueprint_id = bi.blueprint_id
""")
rows = cur.fetchall()
for i in range(0, len(rows), 20):
url = MARKET_URL % ('&'.join('typeid=%s' % item for item in rows[i:i+20]))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
|
<commit_before>import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
ITEMS = [
34, # Tritanium
35, # Pyerite
36, # Mexallon
37, # Isogen
38, # Nocxium
39, # Zydrine
40, # Megacyte
11399, # Morphite
]
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
url = MARKET_URL % ('&'.join('typeid=%s' % i for i in ITEMS))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
<commit_msg>Update prices for all BlueprintInstances we currently have<commit_after>import sqlite3
import urllib2
import xml.etree.ElementTree as ET
MARKET_URL = 'http://api.eve-central.com/api/marketstat?hours=24&%s'
def main():
conn = sqlite3.connect('everdi.db')
cur = conn.cursor()
# Get all items used in current BlueprintInstances
cur.execute("""
SELECT DISTINCT c.item_id
FROM blueprints_blueprintcomponent c
INNER JOIN blueprints_blueprintinstance AS bi
ON c.blueprint_id = bi.blueprint_id
""")
rows = cur.fetchall()
for i in range(0, len(rows), 20):
url = MARKET_URL % ('&'.join('typeid=%s' % item for item in rows[i:i+20]))
f = urllib2.urlopen(url)
data = f.read()
f.close()
#open('data.txt', 'w').write(data)
#data = open('data.txt').read()
root = ET.fromstring(data)
for t in root.findall('marketstat/type'):
typeid = t.get('id')
sell_median = t.find('sell/median').text
buy_median = t.find('buy/median').text
cur.execute('UPDATE blueprints_item SET sell_median=?, buy_median=? WHERE id=?', (sell_median, buy_median, typeid))
conn.commit()
if __name__ == '__main__':
main()
|
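A note on the record above: it batches the market query twenty type IDs at a time via rows[i:i+20]. One caveat: sqlite3 returns each row as a 1-tuple, so 'typeid=%s' % item renders as typeid=(34,) unless the id is unpacked first. A sketch of the batching pattern with that unpacking (the helper name is illustrative):

def chunked(seq, size):
    # Yield successive fixed-size slices of seq.
    for i in range(0, len(seq), size):
        yield seq[i:i + size]

rows = [(34,), (35,), (36,), (37,), (38,), (39,), (40,), (11399,)]
for batch in chunked(rows, 3):
    query = '&'.join('typeid=%s' % type_id for (type_id,) in batch)
    print(query)  # typeid=34&typeid=35&typeid=36, then the next batch, ...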
b1a33b1a89c00ee6de7949c529ecd4bcf2d38578
|
python/helper/asset_loading.py
|
python/helper/asset_loading.py
|
# Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
|
# Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
|
Fix error in file path of where images are loaded from in load_image()
|
Fix error in file path of where images are loaded from in load_image()
|
Python
|
mit
|
AndyDeany/pygame-template
|
# Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
Fix error in file path of where images are loaded from in load_image()
|
# Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
|
<commit_before># Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
<commit_msg>Fix error in file path of where images are loaded from in load_image()<commit_after>
|
# Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
|
# Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
Fix error in file path of where images are loaded from in load_image()
# Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
|
<commit_before># Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "Image Files\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
<commit_msg>Fix error in file path of where images are loaded from in load_image()<commit_after># Defining a function to load images
def load_image(image_name, fade_enabled=False):
"""fade_enabled should be True if you want images to be able to fade"""
try:
#! Add stuff for loading images of the correct resolution
# depending on the player's resolution settings
if not fade_enabled:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert_alpha() # Fixes per pixel alphas permanently
else:
return pygame.image.load("".join((
file_directory, "assets\\images\\",
image_name, ".png"
))).convert()
except Exception as error:
log("".join(("Failed to load image: ", image_name, ".png")))
|
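A note on the record above: it corrects the directory name but keeps hand-built '\\' separators, which tie the loader to Windows. A portability-minded sketch of the same path construction, assuming the same assets/images layout (nothing here is from the original code):

import os

def image_path(file_directory, image_name):
    # os.path.join picks the right separator for the host OS.
    return os.path.join(file_directory, 'assets', 'images', image_name + '.png')

print(image_path('/home/player/game', 'sprite'))
# /home/player/game/assets/images/sprite.png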
3103bba12ee3580b3f707e88dd23e853af7b47e0
|
calc.py
|
calc.py
|
#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
|
#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
print ':)'
|
Add smiley face to output
|
Add smiley face to output
|
Python
|
bsd-3-clause
|
mkuiper/calc
|
#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
Add smiley face to output
|
#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
print ':)'
|
<commit_before>#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
<commit_msg>Add smiley face to output<commit_after>
|
#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
print ':)'
|
#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
Add smiley face to output
#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
print ':)'
|
<commit_before>#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
<commit_msg>Add smiley face to output<commit_after>#!/usr/bin/env python
"""A Python calculator"""
import sys
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
print sum(numbers)
elif command == 'multiply':
product = 1
for num in numbers:
product *= num
print product
elif command == 'divide':
div = 1
if b != 0:
div = a/b
print div
print ':)'
|
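A note on the record above: beyond the smiley, the calculator has two quirks worth flagging. divide silently yields 1 when b is 0, and it only ever uses the first two operands even though add and multiply accept any number. A sketch generalizing both, in Python 3 syntax unlike the record's Python 2 prints:

from functools import reduce
import operator

def multiply(numbers):
    return reduce(operator.mul, numbers, 1)

def divide(numbers):
    result = numbers[0]
    for n in numbers[1:]:
        if n == 0:
            raise ZeroDivisionError('cannot divide by zero')
        result /= n
    return result

print(multiply([2, 3, 4]))  # 24
print(divide([8.0, 2, 2]))  # 2.0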
54f70d759b2e0d384d626f4b55016166f9b26f16
|
camelot/roundtable/migrations/0002_add_knight_data.py
|
camelot/roundtable/migrations/0002_add_knight_data.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.management import call_command
def add_knight_data(apps, schema_editor):
call_command('loaddata', 'knight_data.json')
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
Knight = apps.get_model('roundtable', 'Knight')
Knight.objects.bulk_create([
Knight(name='Arthur'),
Knight(name='Bedevere'),
Knight(name='Bors'),
Knight(name='Ector'),
Knight(name='Galahad'),
Knight(name='Gawain'),
Knight(name='Lancelot'),
Knight(name='Robin'),
])
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
Implement add_knight_data to generate data directly.
|
Implement add_knight_data to generate data directly.
|
Python
|
bsd-2-clause
|
jambonrose/djangocon2014-updj17
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.management import call_command
def add_knight_data(apps, schema_editor):
call_command('loaddata', 'knight_data.json')
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
Implement add_knight_data to generate data directly.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
Knight = apps.get_model('roundtable', 'Knight')
Knight.objects.bulk_create([
Knight(name='Arthur'),
Knight(name='Bedevere'),
Knight(name='Bors'),
Knight(name='Ector'),
Knight(name='Galahad'),
Knight(name='Gawain'),
Knight(name='Lancelot'),
Knight(name='Robin'),
])
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.management import call_command
def add_knight_data(apps, schema_editor):
call_command('loaddata', 'knight_data.json')
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
<commit_msg>Implement add_knight_data to generate data directly.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
Knight = apps.get_model('roundtable', 'Knight')
Knight.objects.bulk_create([
Knight(name='Arthur'),
Knight(name='Bedevere'),
Knight(name='Bors'),
Knight(name='Ector'),
Knight(name='Galahad'),
Knight(name='Gawain'),
Knight(name='Lancelot'),
Knight(name='Robin'),
])
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.management import call_command
def add_knight_data(apps, schema_editor):
call_command('loaddata', 'knight_data.json')
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
Implement add_knight_data to generate data directly.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
Knight = apps.get_model('roundtable', 'Knight')
Knight.objects.bulk_create([
Knight(name='Arthur'),
Knight(name='Bedevere'),
Knight(name='Bors'),
Knight(name='Ector'),
Knight(name='Galahad'),
Knight(name='Gawain'),
Knight(name='Lancelot'),
Knight(name='Robin'),
])
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.management import call_command
def add_knight_data(apps, schema_editor):
call_command('loaddata', 'knight_data.json')
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
<commit_msg>Implement add_knight_data to generate data directly.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
Knight = apps.get_model('roundtable', 'Knight')
Knight.objects.bulk_create([
Knight(name='Arthur'),
Knight(name='Bedevere'),
Knight(name='Bors'),
Knight(name='Ector'),
Knight(name='Galahad'),
Knight(name='Gawain'),
Knight(name='Lancelot'),
Knight(name='Robin'),
])
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
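A note on the record above: using the historical model from apps.get_model makes the forward step self-contained, but remove_knight_data is still a pass, so the migration cannot actually undo itself. A symmetric reverse step might look like this sketch, mirroring the forward step's name list:

def remove_knight_data(apps, schema_editor):
    Knight = apps.get_model('roundtable', 'Knight')
    Knight.objects.filter(name__in=[
        'Arthur', 'Bedevere', 'Bors', 'Ector',
        'Galahad', 'Gawain', 'Lancelot', 'Robin',
    ]).delete()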
8f0d56334243fa51b401b18139cefeefd26b6a9d
|
core/cb.project/python/sample/app.py
|
core/cb.project/python/sample/app.py
|
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
port=int(os.environ.get('PORT', 5000))
)
|
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
host="0.0.0.0",
port=int(os.environ.get('PORT', 5000))
)
|
Fix error when running python sample on codebox.io
|
Fix error when running python sample on codebox.io
|
Python
|
apache-2.0
|
nobutakaoshiro/codebox,indykish/codebox,blubrackets/codebox,lcamilo15/codebox,rodrigues-daniel/codebox,rodrigues-daniel/codebox,Ckai1991/codebox,smallbal/codebox,code-box/codebox,rajthilakmca/codebox,fly19890211/codebox,ronoaldo/codebox,quietdog/codebox,LogeshEswar/codebox,listepo/codebox,indykish/codebox,CodeboxIDE/codebox,LogeshEswar/codebox,listepo/codebox,etopian/codebox,fly19890211/codebox,code-box/codebox,Ckai1991/codebox,ronoaldo/codebox,lcamilo15/codebox,smallbal/codebox,rajthilakmca/codebox,kustomzone/codebox,nobutakaoshiro/codebox,kustomzone/codebox,blubrackets/codebox,quietdog/codebox,CodeboxIDE/codebox,ahmadassaf/Codebox,etopian/codebox,ahmadassaf/Codebox
|
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
port=int(os.environ.get('PORT', 5000))
)
Fix error when running python sample on codebox.io
|
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
host="0.0.0.0",
port=int(os.environ.get('PORT', 5000))
)
|
<commit_before>import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
port=int(os.environ.get('PORT', 5000))
)
<commit_msg>Fix error when running python sample on codebox.io<commit_after>
|
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
host="0.0.0.0",
port=int(os.environ.get('PORT', 5000))
)
|
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
port=int(os.environ.get('PORT', 5000))
)
Fix error when running python sample on codebox.io
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
host="0.0.0.0",
port=int(os.environ.get('PORT', 5000))
)
|
<commit_before>import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
port=int(os.environ.get('PORT', 5000))
)
<commit_msg>Fix error when running python sample on codebox.io<commit_after>import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(
host="0.0.0.0",
port=int(os.environ.get('PORT', 5000))
)
|
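A note on the record above: binding to 0.0.0.0 exposes Flask's dev server on every interface, which a hosted sandbox needs in order to route traffic to it. If the sample also had to run locally without that exposure, an environment-driven host is a common pattern; HOST is an illustrative variable name here, not one the codebox.io platform defines:

import os
from flask import Flask

app = Flask(__name__)

@app.route("/")
def hello():
    return "Hello World!"

if __name__ == "__main__":
    app.run(
        host=os.environ.get('HOST', '0.0.0.0'),
        port=int(os.environ.get('PORT', 5000)),
    )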
8b46628656e0e649a9c973c911c01e44222906b7
|
anchor/models.py
|
anchor/models.py
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.token = data.get('token')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
Remove token from being stored
|
Remove token from being stored
|
Python
|
apache-2.0
|
oldarmyc/anchor,oldarmyc/anchor,oldarmyc/anchor
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.token = data.get('token')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
Remove token from being stored
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
<commit_before># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.token = data.get('token')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
<commit_msg>Remove token from being stored<commit_after>
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.token = data.get('token')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
Remove token from being stored# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
<commit_before># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.token = data.get('token')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
<commit_msg>Remove token from being stored<commit_after># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.region = data.get('region').lower()
self.servers = data.get('servers')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
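A quick behavioural sketch of the revised Account class (the input dict below is invented for illustration and is not from the repository): a 'token' key in the source data is now simply ignored rather than cached on the object.

    data = {
        'account_number': '123456',
        'token': 'abc123',  # present in the input, but no longer stored
        'host_servers': [],
        'region': 'DFW',
        'servers': [],
    }
    account = Account(data)
    assert not hasattr(account, 'token')
    assert account.region == 'dfw'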
70a97ab38d2b30652c41d1e058ef4447fdd54863
|
test_settings.py
|
test_settings.py
|
import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
|
import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
USE_TZ = True
|
Fix Django 5.0 deprecation warning.
|
Fix Django 5.0 deprecation warning.
|
Python
|
mit
|
adamcharnock/django-tz-detect,adamcharnock/django-tz-detect,adamcharnock/django-tz-detect
|
import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
Fix Django 5.0 deprecation warning.
|
import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
USE_TZ = True
|
<commit_before>import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
<commit_msg>Fix Django 5.0 deprecation warning.<commit_after>
|
import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
USE_TZ = True
|
import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
Fix Django 5.0 deprecation warning.import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
USE_TZ = True
|
<commit_before>import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
<commit_msg>Fix Django 5.0 deprecation warning.<commit_after>import os
SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn#tz26vuq4"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.contenttypes",
"tz_detect",
]
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"tz_detect.middleware.TimezoneMiddleware",
]
MIDDLEWARE = MIDDLEWARE_CLASSES
SITE_ID = 1
USE_TZ = True
|
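For context on why the one-line change silences the warning (a sketch based on Django's documented transition, not code from the repo): Django 4.x warns when USE_TZ is left unset because its default flips from False to True in Django 5.0, so declaring it explicitly opts in to the new default, under which Django hands back timezone-aware datetimes.

    from django.utils import timezone

    # Run under the settings above: with USE_TZ = True, now() is aware (UTC).
    now = timezone.now()
    assert now.tzinfo is not None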
8e13a10af23991bbd8aa6e59b6890fa729fe698f
|
social/info.py
|
social/info.py
|
"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
|
"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
import logging
log = logging.getLogger('social.info')
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
# Ignore empty strings...
if type(value) is str and value == '':
return
log.debug('DemographicInfo: %r=%r', key, value)
# Otherwise, do the set stuff.
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
|
Add logging for DemographicInfo, and ignore empty strings.
|
Add logging for DemographicInfo, and ignore empty strings.
|
Python
|
bsd-3-clause
|
brenns10/social,brenns10/social
|
"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
Add logging for DemographicInfo, and ignore empty strings.
|
"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
import logging
log = logging.getLogger('social.info')
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
# Ignore empty strings...
if type(value) is str and value == '':
return
log.debug('DemographicInfo: %r=%r', key, value)
# Otherwise, do the set stuff.
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
|
<commit_before>"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
<commit_msg>Add logging for DemographicInfo, and ignore empty strings.<commit_after>
|
"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
import logging
log = logging.getLogger('social.info')
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
# Ignore empty strings...
if type(value) is str and value == '':
return
log.debug('DemographicInfo: %r=%r', key, value)
# Otherwise, do the set stuff.
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
|
"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
Add logging for DemographicInfo, and ignore empty strings."""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
import logging
log = logging.getLogger('social.info')
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
# Ignore empty strings...
if type(value) is str and value == '':
return
log.debug('DemographicInfo: %r=%r', key, value)
# Otherwise, do the set stuff.
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
|
<commit_before>"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
<commit_msg>Add logging for DemographicInfo, and ignore empty strings.<commit_after>"""
Contains DemographicInfo class, which stores info found by accounts.
"""
from collections import UserDict
import logging
log = logging.getLogger('social.info')
class DemographicInfo(UserDict):
def __setitem__(self, key, value):
# Ignore empty strings...
if type(value) is str and value == '':
return
log.debug('DemographicInfo: %r=%r', key, value)
# Otherwise, do the set stuff.
if key in self.data:
self.data[key].add(value)
else:
self.data[key] = set([value])
|
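A small usage sketch of the revised class (values invented): entries accumulate into per-key sets, duplicates collapse, and empty strings are dropped before they reach the underlying dict.

    info = DemographicInfo()
    info['city'] = 'Cleveland'
    info['city'] = 'Cleveland'   # duplicate collapses into the set
    info['city'] = ''            # empty string is ignored
    info['age'] = 27             # non-string values pass straight through
    assert info['city'] == {'Cleveland'}
    assert info['age'] == {27}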
dcc309f0634eac6d7461d506cac4bc5b9cfbc311
|
experiments/T1T2/RamseySequence.py
|
experiments/T1T2/RamseySequence.py
|
import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
print RamseyStep
print RamseyStop
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
|
import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
|
Remove unnecessary (and py2) print
|
Remove unnecessary (and py2) print
|
Python
|
apache-2.0
|
BBN-Q/Qlab,BBN-Q/Qlab,BBN-Q/Qlab,BBN-Q/Qlab
|
import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
print RamseyStep
print RamseyStop
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
Remove unnecessary (and py2) print
|
import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
|
<commit_before>import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
print RamseyStep
print RamseyStop
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
<commit_msg>Remove unnecessary (and py2) print<commit_after>
|
import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
|
import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
print RamseyStep
print RamseyStop
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
Remove unnecessary (and py2) printimport argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
|
<commit_before>import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
print RamseyStep
print RamseyStop
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
<commit_msg>Remove unnecessary (and py2) print<commit_after>import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('qubit', help='qubit name')
parser.add_argument('stop', help='longest delay in ns', type = int)
parser.add_argument('step', help='delay step in ns', type = int)
args = parser.parse_args()
from QGL import *
q = QubitFactory(args.qubit)
RamseyStop = args.stop
RamseyStep = args.step
Ramsey(q, np.arange(0,RamseyStop/1e9,RamseyStep/1e9), suffix=True)
|
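For reference, a hypothetical invocation of the trimmed script (the PyQLab path and qubit name are invented): this sweeps the Ramsey delay from 0 to 20000 ns in 200 ns steps, since both arguments are given in nanoseconds and divided by 1e9 before being handed to np.arange.

    python RamseySequence.py /path/to/PyQLab q1 20000 200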
3d48b9b976603ea8c13937a0c6eac8a74d62b72d
|
calexicon/calendars/other.py
|
calexicon/calendars/other.py
|
from datetime import date as vanilla_date, timedelta
from .base import Calendar
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
vd = self.first_ce_day + timedelta(days=n - self.first_ce_day_number)
return self.bless(vd)
|
from datetime import date as vanilla_date, timedelta
from .base import Calendar
from ..dates.bce import BCEDate
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
offset = n - self.first_ce_day_number
if offset >= 0:
vd = self.first_ce_day + timedelta(days=offset)
return JulianDayNumber().from_date(vd)
else:
d = BCEDate(1, 1, 1)
self.bless(d)
return d
|
Make a BCEDate for the dates before CE day 1.
|
Make a BCEDate for the dates before CE day 1.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
from datetime import date as vanilla_date, timedelta
from .base import Calendar
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
vd = self.first_ce_day + timedelta(days=n - self.first_ce_day_number)
return self.bless(vd)
Make a BCEDate for the dates before CE day 1.
|
from datetime import date as vanilla_date, timedelta
from .base import Calendar
from ..dates.bce import BCEDate
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
offset = n - self.first_ce_day_number
if offset >= 0:
vd = self.first_ce_day + timedelta(days=offset)
return JulianDayNumber().from_date(vd)
else:
d = BCEDate(1, 1, 1)
self.bless(d)
return d
|
<commit_before>from datetime import date as vanilla_date, timedelta
from .base import Calendar
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
vd = self.first_ce_day + timedelta(days=n - self.first_ce_day_number)
return self.bless(vd)
<commit_msg>Make a BCEDate for the dates before CE day 1.<commit_after>
|
from datetime import date as vanilla_date, timedelta
from .base import Calendar
from ..dates.bce import BCEDate
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
offset = n - self.first_ce_day_number
if offset >= 0:
vd = self.first_ce_day + timedelta(days=offset)
return JulianDayNumber().from_date(vd)
else:
d = BCEDate(1, 1, 1)
self.bless(d)
return d
|
from datetime import date as vanilla_date, timedelta
from .base import Calendar
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
vd = self.first_ce_day + timedelta(days=n - self.first_ce_day_number)
return self.bless(vd)
Make a BCEDate for the dates before CE day 1.from datetime import date as vanilla_date, timedelta
from .base import Calendar
from ..dates.bce import BCEDate
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
offset = n - self.first_ce_day_number
if offset >= 0:
vd = self.first_ce_day + timedelta(days=offset)
return JulianDayNumber().from_date(vd)
else:
d = BCEDate(1, 1, 1)
self.bless(d)
return d
|
<commit_before>from datetime import date as vanilla_date, timedelta
from .base import Calendar
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
vd = self.first_ce_day + timedelta(days=n - self.first_ce_day_number)
return self.bless(vd)
<commit_msg>Make a BCEDate for the dates before CE day 1.<commit_after>from datetime import date as vanilla_date, timedelta
from .base import Calendar
from ..dates.bce import BCEDate
class JulianDayNumber(Calendar):
first_ce_day = vanilla_date(1, 1, 1)
first_ce_day_number = 1721423
display_name = "Julian Day Number"
@staticmethod
def date_display_string(d):
n = JulianDayNumber._day_number(d)
return "Day %d" % n
@staticmethod
def representation(d):
return {'day_number': JulianDayNumber._day_number(d)}
@staticmethod
def _day_number(d):
return (d - JulianDayNumber.first_ce_day).days + JulianDayNumber.first_ce_day_number
def date(self, n):
offset = n - self.first_ce_day_number
if offset >= 0:
vd = self.first_ce_day + timedelta(days=offset)
return JulianDayNumber().from_date(vd)
else:
d = BCEDate(1, 1, 1)
self.bless(d)
return d
|
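A consistency check on the class constants (a sketch assuming the class above is in scope; it exercises only the arithmetic, not historical calendar accuracy): _day_number anchors vanilla_date(1, 1, 1) at first_ce_day_number, and date() inverts it for non-negative offsets.

    from datetime import date

    assert JulianDayNumber._day_number(date(1, 1, 1)) == 1721423
    assert JulianDayNumber._day_number(date(1, 1, 2)) == 1721424
    d = JulianDayNumber().date(1721424)  # inverse mapping for offsets >= 0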
a31103d5001c7c6ebebddd25f9d1bb4ed0e0c2e9
|
polling_stations/apps/data_importers/management/commands/import_gosport.py
|
polling_stations/apps/data_importers/management/commands/import_gosport.py
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.addressline6 in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.postcode in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
|
Fix Gosport import script error
|
Fix Gosport import script error
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.addressline6 in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
Fix Gosport import script error
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.postcode in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
|
<commit_before>from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.addressline6 in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
<commit_msg>Fix Gosport import script error<commit_after>
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.postcode in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.addressline6 in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
Fix Gosport import script errorfrom data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.postcode in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
|
<commit_before>from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.addressline6 in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
<commit_msg>Fix Gosport import script error<commit_after>from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "GOS"
addresses_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Districts v1 (07 03 2022).csv"
stations_name = "2022-05-05/2022-03-07T15:47:28.644792/2022 Borough of Gosport - Democracy Club - Polling Stations v1 (07 03 2022).csv"
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.postcode in ["PO12 2EH"]:
return None
return super().address_record_to_dict(record)
|
5d9eabe588231444083d13dc50371ea6952d445e
|
mirrit/web/models.py
|
mirrit/web/models.py
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
github_access_token = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
|
Add github token to model
|
Add github token to model
|
Python
|
bsd-3-clause
|
1stvamp/mirrit
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
Add github token to model
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
github_access_token = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
|
<commit_before>from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
<commit_msg>Add github token to model<commit_after>
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
github_access_token = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
Add github token to modelfrom bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
github_access_token = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
|
<commit_before>from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
<commit_msg>Add github token to model<commit_after>from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
github_access_token = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
    @classmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
|
2b5a3f0209d4a5fc5e821ca4f749931d8f6a18be
|
app/wmmetrics.py
|
app/wmmetrics.py
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
|
import os
import sys
from flask import Flask, render_template, request
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(current_dir, '..'))
from wm_metrics import fdc
from wm_metrics import wmfr_photography
from wm_metrics import commons_cat_metrics
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
|
Add imports for Wm_metrics module
|
Webapp: Add imports for Wm_metrics module
We have to manually add wm_metrics to the Python Path
|
Python
|
mit
|
danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
Webapp: Add imports for Wm_metrics module
We have to manually add wm_metrics to the Python Path
|
import os
import sys
from flask import Flask, render_template, request
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(current_dir, '..'))
from wm_metrics import fdc
from wm_metrics import wmfr_photography
from wm_metrics import commons_cat_metrics
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Webapp: Add imports for Wm_metrics module
We have to manually add wm_metrics to the Python Path<commit_after>
|
import os
import sys
from flask import Flask, render_template, request
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(current_dir, '..'))
from wm_metrics import fdc
from wm_metrics import wmfr_photography
from wm_metrics import commons_cat_metrics
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
Webapp: Add imports for Wm_metrics module
We have to manually add wm_metrics to the Python Pathimport os
import sys
from flask import Flask, render_template, request
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(current_dir, '..'))
from wm_metrics import fdc
from wm_metrics import wmfr_photography
from wm_metrics import commons_cat_metrics
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Webapp: Add imports for Wm_metrics module
We have to manually add wm_metrics to the Python Path<commit_after>import os
import sys
from flask import Flask, render_template, request
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(current_dir, '..'))
from wm_metrics import fdc
from wm_metrics import wmfr_photography
from wm_metrics import commons_cat_metrics
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
return render_template('fdc-report.html')
if __name__ == "__main__":
app.run(debug=True)
|
85748ff761ef1373a9828829a447c7b83db246de
|
coliziune/teste/generate_ok.py
|
coliziune/teste/generate_ok.py
|
from sh import cp, rm
import sh
import os
SURSA_CPP = 'main.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
os.system('./a.out')
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
|
import subprocess
from sh import cp, rm
import sh
import os
SURSA_CPP = 'medie.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
print subprocess.check_output('time ./a.out', shell=True)
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
|
Print time it took to run the source
|
Print time it took to run the source
|
Python
|
mit
|
palcu/rotopcoder,palcu/rotopcoder
|
from sh import cp, rm
import sh
import os
SURSA_CPP = 'main.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
os.system('./a.out')
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
Print time it took to run the source
|
import subprocess
from sh import cp, rm
import sh
import os
SURSA_CPP = 'medie.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
print subprocess.check_output('time ./a.out', shell=True)
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
|
<commit_before>from sh import cp, rm
import sh
import os
SURSA_CPP = 'main.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
os.system('./a.out')
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
<commit_msg>Print time it took to run the source<commit_after>
|
import subprocess
from sh import cp, rm
import sh
import os
SURSA_CPP = 'medie.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
print subprocess.check_output('time ./a.out', shell=True)
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
|
from sh import cp, rm
import sh
import os
SURSA_CPP = 'main.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
os.system('./a.out')
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
Print time it took to run the sourceimport subprocess
from sh import cp, rm
import sh
import os
SURSA_CPP = 'medie.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
print subprocess.check_output('time ./a.out', shell=True)
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
|
<commit_before>from sh import cp, rm
import sh
import os
SURSA_CPP = 'main.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
os.system('./a.out')
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
<commit_msg>Print time it took to run the source<commit_after>import subprocess
from sh import cp, rm
import sh
import os
SURSA_CPP = 'medie.cpp'
PROBLEMA = 'coliziune'
cp('../' + SURSA_CPP, '.')
os.system('g++ ' + SURSA_CPP)
for i in range(1, 11):
print 'Testul ', i
cp(PROBLEMA + str(i) + '.in', PROBLEMA + '.in')
print subprocess.check_output('time ./a.out', shell=True)
cp(PROBLEMA + '.out', PROBLEMA + str(i) + '.ok')
for extension in ['in', 'out']:
rm(PROBLEMA + '.' + extension)
rm(SURSA_CPP)
rm('a.out')
|
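One caveat worth flagging (a sketch, not from the repository): the shell's time keyword and /usr/bin/time both write their report to stderr, while check_output captures stdout only, so the printed value is the solver's own output and the timing lands on the inherited stderr. Capturing both streams makes that explicit:

    import subprocess
    proc = subprocess.Popen('time ./a.out', shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    print out   # the program's own stdout
    print err   # the timing report from `time`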
87978c2b72777f3ef97cf1ae16f14797977bc34d
|
morenines/ignores.py
|
morenines/ignores.py
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
    def __init__(self, default_patterns=[]):
        # Copy so read() never mutates the caller's list or the shared default
        self.patterns = list(default_patterns)
    def read(self, path):
        with open(path, 'r') as stream:
            self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
Make Ignores init accept default patterns
|
Make Ignores init accept default patterns
This also makes read() an instance method instead of a class method.
|
Python
|
mit
|
mcgid/morenines,mcgid/morenines
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
Make Ignores init accept default patterns
This also makes read() an instance method instead of a class method.
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
    def __init__(self, default_patterns=[]):
        # Copy so read() never mutates the caller's list or the shared default
        self.patterns = list(default_patterns)
    def read(self, path):
        with open(path, 'r') as stream:
            self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
<commit_before>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
<commit_msg>Make Ignores init accept default patterns
This also makes read() an instance method instead of a class method.<commit_after>
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
Make Ignores init accept default patterns
This also makes read() an instance method instead of a class method.
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
<commit_before>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
<commit_msg>Make Ignores init accept default patterns
This also makes read() an instance method instead of a class method.<commit_after>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
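A note on the record above: after the change, read() keeps the `cls` parameter from its classmethod days yet is called as an instance method and refers to `self`, which would raise a NameError at runtime; `default_patterns=[]` is also a shared mutable default. A corrected sketch of the stated intent (assuming plain open() and no click dependency):

import os
from fnmatch import fnmatchcase

class Ignores(object):
    def __init__(self, default_patterns=None):
        # Copy to avoid aliasing the caller's list or sharing a mutable default.
        self.patterns = list(default_patterns or [])

    def read(self, path):
        # Instance method: extend the existing patterns from a file.
        with open(path, 'r') as stream:
            self.patterns.extend(line.strip() for line in stream)

    def match(self, path):
        filename = os.path.basename(path)
        return any(fnmatchcase(filename, p) for p in self.patterns)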
c3838132d3a4622ab4c9660f574e8219ac5e164b
|
mysite/core/tasks.py
|
mysite/core/tasks.py
|
from intercom.client import Client
from django.conf import settings
from celery import shared_task
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
|
import logging
from intercom.client import Client
from django.conf import settings
from celery import shared_task
log = logging.getLogger(__name__)
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
|
Add logging for Intercom event generation
|
Add logging for Intercom event generation
|
Python
|
apache-2.0
|
raccoongang/socraticqs2,raccoongang/socraticqs2,cjlee112/socraticqs2,raccoongang/socraticqs2,cjlee112/socraticqs2,cjlee112/socraticqs2,raccoongang/socraticqs2,cjlee112/socraticqs2
|
from intercom.client import Client
from django.conf import settings
from celery import shared_task
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
Add logging for Intercom event generation
|
import logging
from intercom.client import Client
from django.conf import settings
from celery import shared_task
log = logging.getLogger(__name__)
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
|
<commit_before>from intercom.client import Client
from django.conf import settings
from celery import shared_task
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
<commit_msg>Add logging for Intercom event generation<commit_after>
|
import logging
from intercom.client import Client
from django.conf import settings
from celery import shared_task
log = logging.getLogger(__name__)
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
|
from intercom.client import Client
from django.conf import settings
from celery import shared_task
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
Add logging for Intercom event generation
import logging
from intercom.client import Client
from django.conf import settings
from celery import shared_task
log = logging.getLogger(__name__)
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
|
<commit_before>from intercom.client import Client
from django.conf import settings
from celery import shared_task
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
<commit_msg>Add logging for Intercom event generation<commit_after>import logging
from intercom.client import Client
from django.conf import settings
from celery import shared_task
log = logging.getLogger(__name__)
intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN)
@shared_task
def intercom_event(event_name, created_at, email, metadata):
intercom.events.create(
event_name=event_name,
created_at=created_at,
email=email,
metadata=metadata
)
log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
|
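A note on the record above: str.format builds the log message eagerly even when INFO is disabled. Logging's %-style arguments defer formatting until the record is actually emitted; a small sketch with the same field order as the commit:

import logging

log = logging.getLogger(__name__)

def intercom_event(event_name, created_at, email, metadata):
    # Lazy formatting: the string is only rendered if INFO is enabled.
    log.info("%s:%s:%s:%s", event_name, created_at, email, metadata)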
702217fee6e332b3d08902bb67f0725626f0c88d
|
test_defuzz.py
|
test_defuzz.py
|
from defuzz import Defuzzer
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
|
import itertools
import math
from defuzz import Defuzzer
from hypothesis import given
from hypothesis.strategies import floats, lists, tuples
from hypo_helpers import f
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
@given(lists(tuples(f, f)))
def test_hypo(points):
dfz = Defuzzer(round_digits=2)
dfz_points = [dfz.defuzz(pt) for pt in points]
# The output values should all be in the inputs.
assert all(pt in points for pt in dfz_points)
# No two unequal output values should be too close together.
if len(points) > 1:
for a, b in itertools.combinations(dfz_points, 2):
if a == b:
continue
distance = math.hypot(a[0] - b[0], a[1] - b[1])
assert distance > .01
|
Add a Hypothesis test for Defuzzer
|
Add a Hypothesis test for Defuzzer
|
Python
|
apache-2.0
|
nedbat/zellij
|
from defuzz import Defuzzer
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
Add a Hypothesis test for Defuzzer
|
import itertools
import math
from defuzz import Defuzzer
from hypothesis import given
from hypothesis.strategies import floats, lists, tuples
from hypo_helpers import f
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
@given(lists(tuples(f, f)))
def test_hypo(points):
dfz = Defuzzer(round_digits=2)
dfz_points = [dfz.defuzz(pt) for pt in points]
# The output values should all be in the inputs.
assert all(pt in points for pt in dfz_points)
# No two unequal output values should be too close together.
if len(points) > 1:
for a, b in itertools.combinations(dfz_points, 2):
if a == b:
continue
distance = math.hypot(a[0] - b[0], a[1] - b[1])
assert distance > .01
|
<commit_before>from defuzz import Defuzzer
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
<commit_msg>Add a Hypothesis test for Defuzzer<commit_after>
|
import itertools
import math
from defuzz import Defuzzer
from hypothesis import given
from hypothesis.strategies import floats, lists, tuples
from hypo_helpers import f
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
@given(lists(tuples(f, f)))
def test_hypo(points):
dfz = Defuzzer(round_digits=2)
dfz_points = [dfz.defuzz(pt) for pt in points]
# The output values should all be in the inputs.
assert all(pt in points for pt in dfz_points)
# No two unequal output values should be too close together.
if len(points) > 1:
for a, b in itertools.combinations(dfz_points, 2):
if a == b:
continue
distance = math.hypot(a[0] - b[0], a[1] - b[1])
assert distance > .01
|
from defuzz import Defuzzer
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
Add a Hypothesis test for Defuzzer
import itertools
import math
from defuzz import Defuzzer
from hypothesis import given
from hypothesis.strategies import floats, lists, tuples
from hypo_helpers import f
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
@given(lists(tuples(f, f)))
def test_hypo(points):
dfz = Defuzzer(round_digits=2)
dfz_points = [dfz.defuzz(pt) for pt in points]
# The output values should all be in the inputs.
assert all(pt in points for pt in dfz_points)
# No two unequal output values should be too close together.
if len(points) > 1:
for a, b in itertools.combinations(dfz_points, 2):
if a == b:
continue
distance = math.hypot(a[0] - b[0], a[1] - b[1])
assert distance > .01
|
<commit_before>from defuzz import Defuzzer
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
<commit_msg>Add a Hypothesis test for Defuzzer<commit_after>import itertools
import math
from defuzz import Defuzzer
from hypothesis import given
from hypothesis.strategies import floats, lists, tuples
from hypo_helpers import f
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
@given(lists(tuples(f, f)))
def test_hypo(points):
dfz = Defuzzer(round_digits=2)
dfz_points = [dfz.defuzz(pt) for pt in points]
# The output values should all be in the inputs.
assert all(pt in points for pt in dfz_points)
# No two unequal output values should be too close together.
if len(points) > 1:
for a, b in itertools.combinations(dfz_points, 2):
if a == b:
continue
distance = math.hypot(a[0] - b[0], a[1] - b[1])
assert distance > .01
|
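A note on the record above: the test imports a strategy `f` from a local hypo_helpers module that is not shown. A plausible stand-in, assuming `f` is meant to generate well-behaved coordinates so the math.hypot distances stay finite, is a bounded floats strategy; the real helper may differ:

from hypothesis.strategies import floats

# Hypothetical reconstruction of hypo_helpers.f.
f = floats(min_value=-1000.0, max_value=1000.0,
           allow_nan=False, allow_infinity=False)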
12209289ebbdd5d7b93e6eb671582a36bec1d6c2
|
wdom/__init__.py
|
wdom/__init__.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
if sys.version_info < (3, 5):
import warnings
warnings.warn(
'Next version of WDOM will not support python 3.4. Please update to version 3.6+.' # noqa: E501
)
|
Raise warning when python version is < 3.5
|
Raise warning when python version is < 3.5
|
Python
|
mit
|
miyakogi/wdom,miyakogi/wdom,miyakogi/wdom
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
Raise warning when python version is < 3.5
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
if sys.version_info < (3, 5):
import warnings
warnings.warn(
'Next version of WDOM will not support python 3.4. Please update to version 3.6+.' # noqa: E501
)
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
<commit_msg>Raise warning when python version is < 3.5<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
if sys.version_info < (3, 5):
import warnings
warnings.warn(
'Next version of WDOM will not support python 3.4. Please update to version 3.6+.' # noqa: E501
)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
Raise warning when python version is < 3.5
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
if sys.version_info < (3, 5):
import warnings
warnings.warn(
'Next version of WDOM will not support python 3.4. Please update to version 3.6+.' # noqa: E501
)
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
<commit_msg>Raise warning when python version is < 3.5<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
if sys.version_info < (3, 5):
import warnings
warnings.warn(
'Next version of WDOM will not support python 3.4. Please update to version 3.6+.' # noqa: E501
)
|
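A note on the record above: warnings.warn without a category defaults to UserWarning; deprecation notices are conventionally raised as DeprecationWarning (or FutureWarning for end users) so warning filters can target them. One hedged variant of the same gate, not what the project shipped:

import sys
import warnings

if sys.version_info < (3, 5):
    warnings.warn(
        'Support for Python 3.4 will be dropped in the next release.',
        DeprecationWarning,
        stacklevel=2,  # attribute the warning to the importing module
    )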
c9868c56ca2aa4e5cfe2c9bad595b4d46d3a0137
|
bcbiovm/__init__.py
|
bcbiovm/__init__.py
|
"""Run bcbio-nextgen installations inside of virtual machines and containers.
"""
|
"""Run bcbio-nextgen installations inside of virtual machines
and containers.
"""
class Config(object):
"""Container for global config values."""
def __init__(self, config=None):
self._data = {}
if config:
self.update(config)
def __str__(self):
"""String representation for current task."""
return "<Config: {}>".format(self._data.keys())
def __setattr__(self, name, value):
"""Hook set attribute method for update the received item
from config.
"""
if name == "_data":
self.__dict__[name] = value
return
container = getattr(self, "_data", None)
if container:
container[name] = value
def __getattr__(self, name):
"""Hook for getting attribute from local storage"""
container = self.__dict__.get("_data")
if container and name in container:
return container[name]
raise AttributeError("'Config' object has no attribute '{}'"
.format(name))
def update(self, config):
"""Update fields from local storage."""
if not isinstance(config, dict):
raise ValueError("Argument `config` should be dictionary")
self._data.update(config)
config = Config()
|
Add container for global configurations
|
Add container for global configurations
|
Python
|
mit
|
alexandrucoman/bcbio-nextgen-vm,alexandrucoman/bcbio-nextgen-vm
|
"""Run bcbio-nextgen installations inside of virtual machines and containers.
"""
Add container for global configurations
|
"""Run bcbio-nextgen installations inside of virtual machines
and containers.
"""
class Config(object):
"""Container for global config values."""
def __init__(self, config=None):
self._data = {}
if config:
self.update(config)
def __str__(self):
"""String representation for current task."""
return "<Config: {}>".format(self._data.keys())
def __setattr__(self, name, value):
"""Hook set attribute method for update the received item
from config.
"""
if name == "_data":
self.__dict__[name] = value
return
container = getattr(self, "_data", None)
if container:
container[name] = value
def __getattr__(self, name):
"""Hook for getting attribute from local storage"""
container = self.__dict__.get("_data")
if container and name in container:
return container[name]
raise AttributeError("'Config' object has no attribute '{}'"
.format(name))
def update(self, config):
"""Update fields from local storage."""
if not isinstance(config, dict):
raise ValueError("Argument `config` should be dictionary")
self._data.update(config)
config = Config()
|
<commit_before>"""Run bcbio-nextgen installations inside of virtual machines and containers.
"""
<commit_msg>Add container for global configurations<commit_after>
|
"""Run bcbio-nextgen installations inside of virtual machines
and containers.
"""
class Config(object):
"""Container for global config values."""
def __init__(self, config=None):
self._data = {}
if config:
self.update(config)
def __str__(self):
"""String representation for current task."""
return "<Config: {}>".format(self._data.keys())
def __setattr__(self, name, value):
"""Hook set attribute method for update the received item
from config.
"""
if name == "_data":
self.__dict__[name] = value
return
container = getattr(self, "_data", None)
if container:
container[name] = value
def __getattr__(self, name):
"""Hook for getting attribute from local storage"""
container = self.__dict__.get("_data")
if container and name in container:
return container[name]
raise AttributeError("'Config' object has no attribute '{}'"
.format(name))
def update(self, config):
"""Update fields from local storage."""
if not isinstance(config, dict):
raise ValueError("Argument `config` should be dictionary")
self._data.update(config)
config = Config()
|
"""Run bcbio-nextgen installations inside of virtual machines and containers.
"""
Add container for global configurations
"""Run bcbio-nextgen installations inside of virtual machines
and containers.
"""
class Config(object):
"""Container for global config values."""
def __init__(self, config=None):
self._data = {}
if config:
self.update(config)
def __str__(self):
"""String representation for current task."""
return "<Config: {}>".format(self._data.keys())
def __setattr__(self, name, value):
"""Hook set attribute method for update the received item
from config.
"""
if name == "_data":
self.__dict__[name] = value
return
container = getattr(self, "_data", None)
if container:
container[name] = value
def __getattr__(self, name):
"""Hook for getting attribute from local storage"""
container = self.__dict__.get("_data")
if container and name in container:
return container[name]
raise AttributeError("'Config' object has no attribute '{}'"
.format(name))
def update(self, config):
"""Update fields from local storage."""
if not isinstance(config, dict):
raise ValueError("Argument `config` should be dictionary")
self._data.update(config)
config = Config()
|
<commit_before>"""Run bcbio-nextgen installations inside of virtual machines and containers.
"""
<commit_msg>Add container for global configurations<commit_after>"""Run bcbio-nextgen installations inside of virtual machines
and containers.
"""
class Config(object):
"""Container for global config values."""
def __init__(self, config=None):
self._data = {}
if config:
self.update(config)
def __str__(self):
"""String representation for current task."""
return "<Config: {}>".format(self._data.keys())
def __setattr__(self, name, value):
"""Hook set attribute method for update the received item
from config.
"""
if name == "_data":
self.__dict__[name] = value
return
container = getattr(self, "_data", None)
if container:
container[name] = value
def __getattr__(self, name):
"""Hook for getting attribute from local storage"""
container = self.__dict__.get("_data")
if container and name in container:
return container[name]
raise AttributeError("'Config' object has no attribute '{}'"
.format(name))
def update(self, config):
"""Update fields from local storage."""
if not isinstance(config, dict):
raise ValueError("Argument `config` should be dictionary")
self._data.update(config)
config = Config()
|
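A usage sketch for the Config container above, plus one subtlety worth knowing: __setattr__ guards with `if container:` (truthiness) rather than `if container is not None:`, so attribute assignment on a Config whose _data is still empty is silently dropped:

config = Config({'region': 'us-east-1'})
print(config.region)            # 'us-east-1', resolved via __getattr__
config.update({'debug': True})
config.timeout = 30             # stored, because _data is non-empty
print(config.timeout)           # 30

empty = Config()
empty.foo = 1                   # no effect: _data == {} is falsy
# print(empty.foo)              # would raise AttributeError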
8d5658ac5fa12381797b8c5f5fcd7f010aa3af0d
|
azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/__init__.py
|
azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/__init__.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__all__ = ['ManagedServiceIdentityClient']
__version__ = VERSION
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__version__ = VERSION
|
Fix incorrect CustomVision base init.py
|
Fix incorrect CustomVision base init.py
|
Python
|
mit
|
Azure/azure-sdk-for-python,Azure/azure-sdk-for-python,Azure/azure-sdk-for-python,Azure/azure-sdk-for-python
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__all__ = ['ManagedServiceIdentityClient']
__version__ = VERSION
Fix incorrect CustomVision base init.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__version__ = VERSION
|
<commit_before># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__all__ = ['ManagedServiceIdentityClient']
__version__ = VERSION
<commit_msg>Fix incorrect CustomVision base init.py<commit_after>
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__version__ = VERSION
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__all__ = ['ManagedServiceIdentityClient']
__version__ = VERSION
Fix incorrect CustomVision base init.py
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__version__ = VERSION
|
<commit_before># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__all__ = ['ManagedServiceIdentityClient']
__version__ = VERSION
<commit_msg>Fix incorrect CustomVision base init.py<commit_after># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .version import VERSION
__version__ = VERSION
|
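A note on the record above: __all__ only affects `from package import *`, and listing a name the module never defines — here ManagedServiceIdentityClient, apparently copied from another generated SDK — makes a star-import raise AttributeError, which is why dropping the stale entry fixes the package. A tiny illustration with a hypothetical export list:

from .version import VERSION

__version__ = VERSION
__all__ = ['VERSION', '__version__']  # name only symbols the module defines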
d25d5569f51bf78d58eb461d16e9283b920b3fd7
|
skylines/api/views/__init__.py
|
skylines/api/views/__init__.py
|
from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
def register(app):
"""
:param flask.Flask app: a Flask app
"""
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
|
from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
def register(app):
"""
:param flask.Flask app: a Flask app
"""
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
|
Move blueprint imports into register() function
|
api/views: Move blueprint imports into register() function
|
Python
|
agpl-3.0
|
TobiasLohner/SkyLines,shadowoneau/skylines,skylines-project/skylines,RBE-Avionik/skylines,Harry-R/skylines,RBE-Avionik/skylines,snip/skylines,snip/skylines,skylines-project/skylines,skylines-project/skylines,skylines-project/skylines,shadowoneau/skylines,TobiasLohner/SkyLines,Harry-R/skylines,TobiasLohner/SkyLines,Turbo87/skylines,Harry-R/skylines,RBE-Avionik/skylines,Harry-R/skylines,shadowoneau/skylines,kerel-fs/skylines,Turbo87/skylines,Turbo87/skylines,RBE-Avionik/skylines,Turbo87/skylines,shadowoneau/skylines,kerel-fs/skylines,snip/skylines,kerel-fs/skylines
|
from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
def register(app):
"""
:param flask.Flask app: a Flask app
"""
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
api/views: Move blueprint imports into register() function
|
from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
def register(app):
"""
:param flask.Flask app: a Flask app
"""
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
|
<commit_before>from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
def register(app):
"""
:param flask.Flask app: a Flask app
"""
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
<commit_msg>api/views: Move blueprint imports into register() function<commit_after>
|
from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
def register(app):
"""
:param flask.Flask app: a Flask app
"""
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
|
from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
def register(app):
"""
:param flask.Flask app: a Flask app
"""
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
api/views: Move blueprint imports into register() function
from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
def register(app):
"""
:param flask.Flask app: a Flask app
"""
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
|
<commit_before>from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
def register(app):
"""
:param flask.Flask app: a Flask app
"""
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
<commit_msg>api/views: Move blueprint imports into register() function<commit_after>from flask import request
from werkzeug.exceptions import Forbidden
from werkzeug.useragents import UserAgent
def register(app):
"""
:param flask.Flask app: a Flask app
"""
from .errors import register as register_error_handlers
from .airports import airports_blueprint
from .airspace import airspace_blueprint
from .mapitems import mapitems_blueprint
from .waves import waves_blueprint
@app.before_request
def require_user_agent():
"""
API requests require a ``User-Agent`` header
"""
user_agent = request.user_agent
assert isinstance(user_agent, UserAgent)
if not user_agent.string:
description = 'You don\'t have the permission to access the API with a User-Agent header.'
raise Forbidden(description)
register_error_handlers(app)
app.register_blueprint(airports_blueprint, url_prefix='/airports')
app.register_blueprint(airspace_blueprint, url_prefix='/airspace')
app.register_blueprint(mapitems_blueprint, url_prefix='/mapitems')
app.register_blueprint(waves_blueprint, url_prefix='/mountain_wave_project')
|
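Two notes on the record above. First, moving the blueprint imports inside register() defers them until the app factory runs, the usual way to break import cycles between an app package and its view modules. Second, the quoted Forbidden description ("...access the API with a User-Agent header") appears to say the opposite of what the check enforces, since requests without a User-Agent are the ones rejected. A minimal sketch of the deferred-import pattern, with hypothetical module names:

from flask import Flask

def register(app):
    # Deferred import: runs only after the package has loaded, so the view
    # module may import from the app package without forming a cycle.
    from myapp.views.airports import airports_blueprint  # hypothetical
    app.register_blueprint(airports_blueprint, url_prefix='/airports')

def create_app():
    app = Flask(__name__)
    register(app)
    return app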
34d895499f9e2a9fe35937ad511fc1adbfd8c12d
|
tailor/main.py
|
tailor/main.py
|
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def main(argv):
infile = FileStream(argv[1])
printer = Printer(filepath=argv[1])
lexer = SwiftLexer(infile)
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main(sys.argv)
|
"""Perform static analysis on a Swift source file."""
import argparse
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('infile', type=os.path.abspath,
help='Swift source file')
parser.add_argument('-l', '--max-lines', type=int, default=0,
help='maximum file line length')
return parser.parse_args()
def main():
args = parse_args()
printer = Printer(filepath=args.infile)
lexer = SwiftLexer(FileStream(args.infile))
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main()
|
Set up argparse to accept params and display usage
|
Set up argparse to accept params and display usage
|
Python
|
mit
|
sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor
|
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def main(argv):
infile = FileStream(argv[1])
printer = Printer(filepath=argv[1])
lexer = SwiftLexer(infile)
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main(sys.argv)
Set up argparse to accept params and display usage
|
"""Perform static analysis on a Swift source file."""
import argparse
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('infile', type=os.path.abspath,
help='Swift source file')
parser.add_argument('-l', '--max-lines', type=int, default=0,
help='maximum file line length')
return parser.parse_args()
def main():
args = parse_args()
printer = Printer(filepath=args.infile)
lexer = SwiftLexer(FileStream(args.infile))
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main()
|
<commit_before>import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def main(argv):
infile = FileStream(argv[1])
printer = Printer(filepath=argv[1])
lexer = SwiftLexer(infile)
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main(sys.argv)
<commit_msg>Set up argparse to accept params and display usage<commit_after>
|
"""Perform static analysis on a Swift source file."""
import argparse
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('infile', type=os.path.abspath,
help='Swift source file')
parser.add_argument('-l', '--max-lines', type=int, default=0,
help='maximum file line length')
return parser.parse_args()
def main():
args = parse_args()
printer = Printer(filepath=args.infile)
lexer = SwiftLexer(FileStream(args.infile))
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main()
|
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def main(argv):
infile = FileStream(argv[1])
printer = Printer(filepath=argv[1])
lexer = SwiftLexer(infile)
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main(sys.argv)
Set up argparse to accept params and display usage
"""Perform static analysis on a Swift source file."""
import argparse
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('infile', type=os.path.abspath,
help='Swift source file')
parser.add_argument('-l', '--max-lines', type=int, default=0,
help='maximum file line length')
return parser.parse_args()
def main():
args = parse_args()
printer = Printer(filepath=args.infile)
lexer = SwiftLexer(FileStream(args.infile))
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main()
|
<commit_before>import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def main(argv):
infile = FileStream(argv[1])
printer = Printer(filepath=argv[1])
lexer = SwiftLexer(infile)
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main(sys.argv)
<commit_msg>Set up argparse to accept params and display usage<commit_after>"""Perform static analysis on a Swift source file."""
import argparse
import os
import sys
PARENT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(PARENT_PATH)
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
from tailor.listeners.mainlistener import MainListener
from tailor.output.printer import Printer
from tailor.swift.swiftlexer import SwiftLexer
from tailor.swift.swiftparser import SwiftParser
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('infile', type=os.path.abspath,
help='Swift source file')
parser.add_argument('-l', '--max-lines', type=int, default=0,
help='maximum file line length')
return parser.parse_args()
def main():
args = parse_args()
printer = Printer(filepath=args.infile)
lexer = SwiftLexer(FileStream(args.infile))
stream = CommonTokenStream(lexer)
parser = SwiftParser(stream)
tree = parser.topLevel()
listener = MainListener(printer)
walker = ParseTreeWalker()
walker.walk(listener, tree)
if __name__ == '__main__':
main()
|
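A note on the record above: the new parser accepts -l/--max-lines, though main() does not consume it yet. parse_args also takes an explicit argv list, which makes the CLI easy to exercise in tests; a small sketch with a hypothetical file path:

import argparse
import os

parser = argparse.ArgumentParser(
    description='Perform static analysis on a Swift source file.')
parser.add_argument('infile', type=os.path.abspath, help='Swift source file')
parser.add_argument('-l', '--max-lines', type=int, default=0,
                    help='maximum file line length')

args = parser.parse_args(['--max-lines', '500', 'Sources/Main.swift'])
print(args.infile)     # absolute path, thanks to type=os.path.abspath
print(args.max_lines)  # 500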
cae249ff553083e3546e26b08779baf6abc69a69
|
active_link/templatetags/active_link_tags.py
|
active_link/templatetags/active_link_tags.py
|
from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = path in request.path
if active:
return css_class
return ''
|
from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = request.path.find(path) == 0
if active:
return css_class
return ''
|
Improve how the active link is checked
|
Improve how the active link is checked
|
Python
|
bsd-3-clause
|
valerymelou/django-active-link
|
from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = path in request.path
if active:
return css_class
return ''
Improve how the active link is checked
|
from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = request.path.find(path) == 0
if active:
return css_class
return ''
|
<commit_before>from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = path in request.path
if active:
return css_class
return ''
<commit_msg>Improve how the active link is checked<commit_after>
|
from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = request.path.find(path) == 0
if active:
return css_class
return ''
|
from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = path in request.path
if active:
return css_class
return ''
Improve how the active link is checkedfrom django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = request.path.find(path) == 0
if active:
return css_class
return ''
|
<commit_before>from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = path in request.path
if active:
return css_class
return ''
<commit_msg>Improve how the active link is checked<commit_after>from django import VERSION as DJANGO_VERSION
from django import template
from django.conf import settings
if DJANGO_VERSION[0] == 1 and DJANGO_VERSION[1] <= 9:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active_link(context, viewname, css_class=None, strict=None):
"""
Renders the given CSS class if the request path matches the path of the view.
:param context: The context where the tag was called. Used to access the request object.
:param viewname: The name of the view (include namespaces if any).
:param css_class: The CSS class to render.
:param strict: If True, the tag will perform an exact match with the request path.
:return:
"""
if css_class is None:
css_class = getattr(settings, 'ACTIVE_LINK_CSS_CLASS', 'active')
if strict is None:
strict = getattr(settings, 'ACTIVE_LINK_STRICT', False)
request = context.get('request')
if request is None:
# Can't work without the request object.
return ''
path = reverse(viewname)
if strict:
active = request.path == path
else:
active = request.path.find(path) == 0
if active:
return css_class
return ''
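A quick way to see what this commit fixes: substring matching ("path in request.path") marks a link active whenever the view's path appears anywhere in the request path, while prefix matching ("request.path.find(path) == 0") only fires when the request path actually starts with it. A minimal sketch with plain strings (no Django request object; the helper name is illustrative):

def is_active(request_path, link_path, strict=False):
    # Mirrors the tag's logic: exact match when strict, path prefix otherwise.
    if strict:
        return request_path == link_path
    return request_path.find(link_path) == 0

# Substring matching would have wrongly flagged this one as active:
assert is_active('/accounts/login/', '/login/') is False
assert is_active('/login/', '/login/') is True
assert is_active('/login/extra/', '/login/') is True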
|
2036b054a57cade23e513877e1dff9455c3d74c7
|
meetup/models.py
|
meetup/models.py
|
from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Does not conform to Meetup Url",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
looker = MeetupApiLookupAgent(link)
is_real = looker.is_real_meetup()
if not is_real:
raise ValidationError("That's not a meetup")
class Meetup(models.Model):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
|
from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
from model_utils.models import TimeStampedModel
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Url should be in form 'meetup.com/meetup-name/'",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
if not MeetupApiLookupAgent(link).meetup_exists():
raise ValidationError("Meetup not found on meetup.com")
class Meetup(TimeStampedModel):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
|
Improve meetup's validation error messages
|
Improve meetup's validation error messages
|
Python
|
mit
|
nicole-a-tesla/meetup.pizza,nicole-a-tesla/meetup.pizza
|
from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Does not conform to Meetup Url",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
looker = MeetupApiLookupAgent(link)
is_real = looker.is_real_meetup()
if not is_real:
raise ValidationError("That's not a meetup")
class Meetup(models.Model):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
Improve meetup's validation error messages
|
from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
from model_utils.models import TimeStampedModel
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Url should be in form 'meetup.com/meetup-name/'",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
if not MeetupApiLookupAgent(link).meetup_exists():
raise ValidationError("Meetup not found on meetup.com")
class Meetup(TimeStampedModel):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
|
<commit_before>from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Does not conform to Meetup Url",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
looker = MeetupApiLookupAgent(link)
is_real = looker.is_real_meetup()
if not is_real:
raise ValidationError("That's not a meetup")
class Meetup(models.Model):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
<commit_msg>Improve meetup's validation error messages<commit_after>
|
from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
from model_utils.models import TimeStampedModel
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Url should be in form 'meetup.com/meetup-name/'",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
if not MeetupApiLookupAgent(link).meetup_exists():
raise ValidationError("Meetup not found on meetup.com")
class Meetup(TimeStampedModel):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
|
from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Does not conform to Meetup Url",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
looker = MeetupApiLookupAgent(link)
is_real = looker.is_real_meetup()
if not is_real:
raise ValidationError("That's not a meetup")
class Meetup(models.Model):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
Improve meetup's validation error messagesfrom django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
from model_utils.models import TimeStampedModel
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Url should be in form 'meetup.com/meetup-name/'",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
if not MeetupApiLookupAgent(link).meetup_exists():
raise ValidationError("Meetup not found on meetup.com")
class Meetup(TimeStampedModel):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
|
<commit_before>from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Does not conform to Meetup Url",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
looker = MeetupApiLookupAgent(link)
is_real = looker.is_real_meetup()
if not is_real:
raise ValidationError("That's not a meetup")
class Meetup(models.Model):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
<commit_msg>Improve meetup's validation error messages<commit_after>from django.db import models
from pizzaplace.models import PizzaPlace
from django.core.validators import RegexValidator
from meetup.services.meetup_api_lookup_agent import MeetupApiLookupAgent
from django.core.exceptions import ValidationError
from model_utils.models import TimeStampedModel
def validate_urlname(link):
validator = RegexValidator(
regex='meetup\.com\/\w+(-\w+)*\/$',
message="Url should be in form 'meetup.com/meetup-name/'",
code='invalid_url')
return validator(link)
def validate_meetup_exists(link):
if not MeetupApiLookupAgent(link).meetup_exists():
raise ValidationError("Meetup not found on meetup.com")
class Meetup(TimeStampedModel):
name = models.CharField(max_length=500, null=False, blank=False, default=None, unique=True)
meetup_link = models.URLField(max_length=500,
unique=True,
default=None,
validators=[validate_urlname, validate_meetup_exists])
pizza_places = models.ManyToManyField(PizzaPlace)
def __str__(self):
return self.name
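A side note on the validators above: RegexValidator performs a re.search, so the pattern only needs to match the tail of the URL. A small standalone check of the URL validator (assumes Django is installed; the MeetupApiLookupAgent check is skipped here because it needs network access):

from django.core.exceptions import ValidationError
from django.core.validators import RegexValidator

validator = RegexValidator(
    regex=r'meetup\.com/\w+(-\w+)*/$',
    message="Url should be in form 'meetup.com/meetup-name/'",
    code='invalid_url')

validator('https://www.meetup.com/papers-we-love/')     # passes silently
try:
    validator('https://www.meetup.com/papers-we-love')  # missing trailing slash
except ValidationError as exc:
    print(exc.messages)  # ["Url should be in form 'meetup.com/meetup-name/'"]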
|
494bfbd5f189cb0f61eecc7e86df53fb9f9a8203
|
src/ggrc/migrations/versions/20150911131818_29dca3ce0556_change_conclusion_dropdowns_options_in_.py
|
src/ggrc/migrations/versions/20150911131818_29dca3ce0556_change_conclusion_dropdowns_options_in_.py
|
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jost@reciprocitylabs.com
# Maintained By: jost@reciprocitylabs.com
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
|
Add copyright header to migration
|
Add copyright header to migration
|
Python
|
apache-2.0
|
NejcZupec/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,hasanalom/ggrc-core,hasanalom/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core
|
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
Add copyright header to migration
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jost@reciprocitylabs.com
# Maintained By: jost@reciprocitylabs.com
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
|
<commit_before>
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
<commit_msg>Add copyright header to migration<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jost@reciprocitylabs.com
# Maintained By: jost@reciprocitylabs.com
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
|
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
Add copyright header to migration# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jost@reciprocitylabs.com
# Maintained By: jost@reciprocitylabs.com
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
|
<commit_before>
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
<commit_msg>Add copyright header to migration<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jost@reciprocitylabs.com
# Maintained By: jost@reciprocitylabs.com
"""Change conclusion dropdowns options in control assessment
Revision ID: 29dca3ce0556
Revises: 2d8a46b1e4a4
Create Date: 2015-09-11 13:18:18.269109
"""
# revision identifiers, used by Alembic.
revision = '29dca3ce0556'
down_revision = '2d8a46b1e4a4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
sql = """
UPDATE control_assessments
SET design = 'Ineffective'
WHERE
design = 'Material weakness' OR
design = 'Significant deficiency'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Ineffective'
WHERE
operationally = 'Material weakness' OR
operationally = 'Significant deficiency'
"""
op.execute(sql)
def downgrade():
sql = """
UPDATE control_assessments
SET design = 'Significant deficiency'
WHERE design = 'Ineffective'
"""
op.execute(sql)
sql = """
UPDATE control_assessments
SET operationally = 'Significant deficiency'
WHERE operationally = 'Ineffective'
"""
op.execute(sql)
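Both directions of this migration are plain value remaps, and the downgrade is lossy: 'Material weakness' and 'Significant deficiency' both collapse into 'Ineffective' going up, and everything comes back as 'Significant deficiency' going down. A database-free sketch of the mapping:

UPGRADE = {'Material weakness': 'Ineffective', 'Significant deficiency': 'Ineffective'}
DOWNGRADE = {'Ineffective': 'Significant deficiency'}

def remap(value, mapping):
    # Values outside the mapping are untouched, like rows missed by the WHERE clause.
    return mapping.get(value, value)

assert remap('Material weakness', UPGRADE) == 'Ineffective'
assert remap('Effective', UPGRADE) == 'Effective'
# Round-tripping does not restore the original value:
assert remap(remap('Material weakness', UPGRADE), DOWNGRADE) == 'Significant deficiency'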
|
1d0c6389cc7d67acb5123555568d83f17f6b0ff0
|
templatetags/generic_markup.py
|
templatetags/generic_markup.py
|
"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value):
"""
Applies text-to-HTML conversion.
"""
return markup_filter(value)
register = Library()
register.filter(apply_markup)
|
"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return markup_filter(value, filter_name=arg)
return markup_filter(value)
register = Library()
register.filter(apply_markup)
|
Make markup template filter take filter_name argument
|
Make markup template filter take filter_name argument
|
Python
|
bsd-3-clause
|
dongpoliu/django-template-utils
|
"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value):
"""
Applies text-to-HTML conversion.
"""
return markup_filter(value)
register = Library()
register.filter(apply_markup)
Make markup template filter take filter_name argument
|
"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return markup_filter(value, filter_name=arg)
return markup_filter(value)
register = Library()
register.filter(apply_markup)
|
<commit_before>"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value):
"""
Applies text-to-HTML conversion.
"""
return markup_filter(value)
register = Library()
register.filter(apply_markup)
<commit_msg>Make markup template filter take filter_name argument<commit_after>
|
"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return markup_filter(value, filter_name=arg)
return markup_filter(value)
register = Library()
register.filter(apply_markup)
|
"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value):
"""
Applies text-to-HTML conversion.
"""
return markup_filter(value)
register = Library()
register.filter(apply_markup)
Make markup template filter take filter_name argument"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return markup_filter(value, filter_name=arg)
return markup_filter(value)
register = Library()
register.filter(apply_markup)
|
<commit_before>"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value):
"""
Applies text-to-HTML conversion.
"""
return markup_filter(value)
register = Library()
register.filter(apply_markup)
<commit_msg>Make markup template filter take filter_name argument<commit_after>"""
A filter which can perform many types of text-to-HTML conversion.
"""
from django.template import Library
from template_utils.markup import markup_filter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return markup_filter(value, filter_name=arg)
return markup_filter(value)
register = Library()
register.filter(apply_markup)
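The arg=None pattern keeps the filter usable both bare and with an explicit converter name; in a template that is {{ value|apply_markup }} versus {{ value|apply_markup:"markdown" }}. A self-contained sketch of the dispatch (markup_filter here is a made-up stand-in, since the real one lives in template_utils.markup):

def markup_filter(value, filter_name=None):
    # Stand-in: pick a converter by name, with a default fallback.
    converters = {'upper': str.upper, 'title': str.title}
    return converters.get(filter_name, str.strip)(value)

def apply_markup(value, arg=None):
    if arg is not None:
        return markup_filter(value, filter_name=arg)
    return markup_filter(value)

assert apply_markup('  hello  ') == 'hello'
assert apply_markup('hello world', 'title') == 'Hello World'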
|
27d40996f0912a1b9b16afa0884f10b1504acce2
|
scoring_engine/web/__init__.py
|
scoring_engine/web/__init__.py
|
import os
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
|
import os
import logging
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
|
Use error severity for flask output
|
Use error severity for flask output
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
import os
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
Use error severity for flask output
|
import os
import logging
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
|
<commit_before>import os
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
<commit_msg>Use error severity for flask output<commit_after>
|
import os
import logging
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
|
import os
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
Use error severity for flask outputimport os
import logging
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
|
<commit_before>import os
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
<commit_msg>Use error severity for flask output<commit_after>import os
import logging
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.secret_key = os.urandom(128)
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about
app.register_blueprint(welcome.mod)
app.register_blueprint(scoreboard.mod)
app.register_blueprint(overview.mod)
app.register_blueprint(services.mod)
app.register_blueprint(admin.mod)
app.register_blueprint(auth.mod)
app.register_blueprint(profile.mod)
app.register_blueprint(api.mod)
app.register_blueprint(about.mod)
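Werkzeug's per-request lines ('GET / HTTP/1.1 200 ...') go through the 'werkzeug' logger at INFO severity, so raising that logger to ERROR silences them while keeping real failures visible. A standalone demonstration of the same two lines from the commit:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)

log.info('GET /scoreboard HTTP/1.1 200')  # suppressed: below the ERROR threshold
log.error('Address already in use')       # still emitted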
|
a22b1019b8bcea2f6bdaf90635165d8d8968dee1
|
scripts/parse-include-paths.py
|
scripts/parse-include-paths.py
|
#!/usr/bin/env python3
"""
Deploys the website to a target directory supplied as an argument.
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
#!/usr/bin/env python3
"""
Parses .vscode/.cmaketools.json to obtain a list of include paths.
These can then be subsequently pasted into .vscode/c_cpp_properties.json
to make intellisense work. This script exists purely for convenience

and only needs to be used when the include paths change (e.g. when a new
dependency is added).
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Fix incorrect global comment in include path script
|
Fix incorrect global comment in include path script
|
Python
|
bsd-3-clause
|
Tom94/tev,Tom94/tev,Tom94/tev,Tom94/tev
|
#!/usr/bin/env python3
"""
Deploys the website to a target directory supplied as an argument.
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
Fix incorrect global comment in include path script
|
#!/usr/bin/env python3
"""
Parses .vscode/.cmaketools.json to obtain a list of include paths.
These can then be subsequently pasted into .vscode/c_cpp_properties.json
to make intellisense work. This script exists purely for convenience
and only needs to be used when the include paths change (e.g. when a new
dependency is added).
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
<commit_before>#!/usr/bin/env python3
"""
Deploys the website to a target directory supplied as an argument.
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
<commit_msg>Fix incorrect global comment in include path script<commit_after>
|
#!/usr/bin/env python3
"""
Parses .vscode/.cmaketools.json to obtain a list of include paths.
These can then be subsequently pasted into .vscode/c_cpp_properties.json
to make intellisense work. This script exists purely for convenience
and only needs to be used when the include paths change (e.g. when a new
dependency is added).
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
#!/usr/bin/env python3
"""
Deploys the website to a target directory supplied as an argument.
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
Fix incorrect global comment in include path script#!/usr/bin/env python3
"""
Parses .vscode/.cmaketools.json to obtain a list of include paths.
These can then be subsequently pasted into .vscode/c_cpp_properties.json
to make intellisense work. This script exists purely for convenience
and only needs to be used when the include paths change (e.g. when a new
dependency is added).
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
<commit_before>#!/usr/bin/env python3
"""
Deploys the website to a target directory supplied as an argument.
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
<commit_msg>Fix incorrect global comment in include path script<commit_after>#!/usr/bin/env python3
"""
Parses .vscode/.cmaketools.json to obtain a list of include paths.
These can then be subsequently pasted into .vscode/c_cpp_properties.json
to make intellisense work. This script exists purely for convenience
and only needs to be used when the include paths change (e.g. when a new
dependency is added).
"""
import json
import os
import sys
def iterate_over(dict_or_list, result):
"""
Iterates recursively over nested lists and dictionaries
keeping track of all "path" values with the key "includePath"
within nested dictionaries.
"""
if isinstance(dict_or_list, list):
for child in dict_or_list:
iterate_over(child, result)
elif isinstance(dict_or_list, dict):
for key, value in dict_or_list.items():
if key == "includePath":
for child in value:
result.add(child["path"])
else:
iterate_over(value, result)
def main(arguments):
"""Main function of this program."""
workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir))
print("Workspace root: '{}'".format(workspace))
with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f:
data = json.loads(f.read())
result = set()
iterate_over(data, result)
result = [x.replace(workspace, "${workspaceRoot}") for x in result]
print(json.dumps(result, indent=0))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
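A tiny driver shows what iterate_over collects from the kind of nested structure .cmaketools.json contains (traversal copied from the script so the snippet runs on its own; the input data is made up):

def iterate_over(dict_or_list, result):
    # Same recursive traversal as in the script above.
    if isinstance(dict_or_list, list):
        for child in dict_or_list:
            iterate_over(child, result)
    elif isinstance(dict_or_list, dict):
        for key, value in dict_or_list.items():
            if key == "includePath":
                for child in value:
                    result.add(child["path"])
            else:
                iterate_over(value, result)

data = {"configurations": [
    {"includePath": [{"path": "/ws/include"}, {"path": "/usr/include"}]},
    {"nested": {"includePath": [{"path": "/ws/ext/eigen"}]}},
]}
result = set()
iterate_over(data, result)
assert result == {"/ws/include", "/usr/include", "/ws/ext/eigen"}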
|
06d9ada18ef8d201383317e6e0fac078a01ab206
|
tests/test_vector2_equality.py
|
tests/test_vector2_equality.py
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
def test_equal():
test_vector_1 = Vector2(50, 800)
test_vector_2 = Vector2(50, 800)
assert test_vector_1 == test_vector_2
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
Replace test for == with a Hypothesis test
|
tests/equality: Replace test for == with a Hypothesis test
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
def test_equal():
test_vector_1 = Vector2(50, 800)
test_vector_2 = Vector2(50, 800)
assert test_vector_1 == test_vector_2
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
tests/equality: Replace test for == with a Hypothesis test
|
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
<commit_before>from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
def test_equal():
test_vector_1 = Vector2(50, 800)
test_vector_2 = Vector2(50, 800)
assert test_vector_1 == test_vector_2
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
<commit_msg>tests/equality: Replace test for == with a Hypothesis test<commit_after>
|
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
def test_equal():
test_vector_1 = Vector2(50, 800)
test_vector_2 = Vector2(50, 800)
assert test_vector_1 == test_vector_2
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
tests/equality: Replace test for == with a Hypothesis testfrom hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
<commit_before>from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
def test_equal():
test_vector_1 = Vector2(50, 800)
test_vector_2 = Vector2(50, 800)
assert test_vector_1 == test_vector_2
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
<commit_msg>tests/equality: Replace test for == with a Hypothesis test<commit_after>from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
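The assume() call is what lets the property skip its single counterexample instead of failing on it: Hypothesis discards generated inputs for which the assumption is false. A self-contained version of the same shape, using a built-in strategy instead of the project's vectors() helper:

from hypothesis import assume, given, strategies as st

@given(x=st.integers())
def test_doubling_changes_nonzero_values(x):
    assume(x != 0)  # x == 0 is the one input where the property fails
    assert 2 * x != x

test_doubling_changes_nonzero_values()  # given-wrapped tests can be called directly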
|
2f7551b953bb225b68880cdeec87236ea6453b12
|
tohu/v6/set_special_methods.py
|
tohu/v6/set_special_methods.py
|
"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from operator import add, mul, gt, ge, lt, le, eq
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation
from .derived_generators import GetAttribute
__all__ = []
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
|
"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation, as_tohu_generator
from .derived_generators import Apply, GetAttribute
from operator import add, mul, gt, ge, lt, le, eq
__all__ = []
def add_generators(self, other):
return Apply(add, self, as_tohu_generator(other))
def radd_generators(self, other):
return Apply(add, as_tohu_generator(other), self)
def mul_generators(self, other):
return Apply(mul, self, as_tohu_generator(other))
def rmul_generators(self, other):
return Apply(mul, as_tohu_generator(other), self)
def eq_generators(self, other):
return Apply(eq, self, as_tohu_generator(other))
def lt_generators(self, other):
return Apply(lt, self, as_tohu_generator(other))
def le_generators(self, other):
return Apply(le, self, as_tohu_generator(other))
def gt_generators(self, other):
return Apply(gt, self, as_tohu_generator(other))
def ge_generators(self, other):
return Apply(ge, self, as_tohu_generator(other))
# Patch TohuBaseGenerator with the new methods
TohuBaseGenerator.__add__ = add_generators
TohuBaseGenerator.__radd__ = radd_generators
TohuBaseGenerator.__mul__ = mul_generators
TohuBaseGenerator.__rmul__ = rmul_generators
TohuBaseGenerator.__eq__ = eq_generators
TohuBaseGenerator.__lt__ = lt_generators
TohuBaseGenerator.__le__ = le_generators
TohuBaseGenerator.__gt__ = gt_generators
TohuBaseGenerator.__ge__ = ge_generators
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
|
Set special methods on TohuBaseGenerator to allow e.g. adding two generators
|
Set special methods on TohuBaseGenerator to allow e.g. adding two generators
|
Python
|
mit
|
maxalbert/tohu
|
"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from operator import add, mul, gt, ge, lt, le, eq
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation
from .derived_generators import GetAttribute
__all__ = []
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
Set special methods on TohuBaseGenerator to allow e.g. adding two generators
|
"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation, as_tohu_generator
from .derived_generators import Apply, GetAttribute
from operator import add, mul, gt, ge, lt, le, eq
__all__ = []
def add_generators(self, other):
return Apply(add, self, as_tohu_generator(other))
def radd_generators(self, other):
return Apply(add, as_tohu_generator(other), self)
def mul_generators(self, other):
return Apply(mul, self, as_tohu_generator(other))
def rmul_generators(self, other):
return Apply(mul, as_tohu_generator(other), self)
def eq_generators(self, other):
return Apply(eq, self, as_tohu_generator(other))
def lt_generators(self, other):
return Apply(lt, self, as_tohu_generator(other))
def le_generators(self, other):
return Apply(le, self, as_tohu_generator(other))
def gt_generators(self, other):
return Apply(gt, self, as_tohu_generator(other))
def ge_generators(self, other):
return Apply(ge, self, as_tohu_generator(other))
# Patch TohuBaseGenerator with the new methods
TohuBaseGenerator.__add__ = add_generators
TohuBaseGenerator.__radd__ = radd_generators
TohuBaseGenerator.__mul__ = mul_generators
TohuBaseGenerator.__rmul__ = rmul_generators
TohuBaseGenerator.__eq__ = eq_generators
TohuBaseGenerator.__lt__ = lt_generators
TohuBaseGenerator.__le__ = le_generators
TohuBaseGenerator.__gt__ = gt_generators
TohuBaseGenerator.__ge__ = ge_generators
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
|
<commit_before>"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from operator import add, mul, gt, ge, lt, le, eq
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation
from .derived_generators import GetAttribute
__all__ = []
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
<commit_msg>Set special methods on TohuBaseGenerator to allow e.g. adding two generators<commit_after>
|
"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation, as_tohu_generator
from .derived_generators import Apply, GetAttribute
from operator import add, mul, gt, ge, lt, le, eq
__all__ = []
def add_generators(self, other):
return Apply(add, self, as_tohu_generator(other))
def radd_generators(self, other):
return Apply(add, as_tohu_generator(other), self)
def mul_generators(self, other):
return Apply(mul, self, as_tohu_generator(other))
def rmul_generators(self, other):
return Apply(mul, as_tohu_generator(other), self)
def eq_generators(self, other):
return Apply(eq, self, as_tohu_generator(other))
def lt_generators(self, other):
return Apply(lt, self, as_tohu_generator(other))
def le_generators(self, other):
return Apply(le, self, as_tohu_generator(other))
def gt_generators(self, other):
return Apply(gt, self, as_tohu_generator(other))
def ge_generators(self, other):
return Apply(ge, self, as_tohu_generator(other))
# Patch TohuBaseGenerator with the new methods
TohuBaseGenerator.__add__ = add_generators
TohuBaseGenerator.__radd__ = radd_generators
TohuBaseGenerator.__mul__ = mul_generators
TohuBaseGenerator.__rmul__ = rmul_generators
TohuBaseGenerator.__eq__ = eq_generators
TohuBaseGenerator.__lt__ = lt_generators
TohuBaseGenerator.__le__ = le_generators
TohuBaseGenerator.__gt__ = gt_generators
TohuBaseGenerator.__ge__ = ge_generators
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
|
"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from operator import add, mul, gt, ge, lt, le, eq
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation
from .derived_generators import GetAttribute
__all__ = []
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
Set special methods on TohuBaseGenerator to allow e.g. adding two generators"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation, as_tohu_generator
from .derived_generators import Apply, GetAttribute
from operator import add, mul, gt, ge, lt, le, eq
__all__ = []
def add_generators(self, other):
return Apply(add, self, as_tohu_generator(other))
def radd_generators(self, other):
return Apply(add, as_tohu_generator(other), self)
def mul_generators(self, other):
return Apply(mul, self, as_tohu_generator(other))
def rmul_generators(self, other):
return Apply(mul, as_tohu_generator(other), self)
def eq_generators(self, other):
return Apply(eq, self, as_tohu_generator(other))
def lt_generators(self, other):
return Apply(lt, self, as_tohu_generator(other))
def le_generators(self, other):
return Apply(le, self, as_tohu_generator(other))
def gt_generators(self, other):
return Apply(gt, self, as_tohu_generator(other))
def ge_generators(self, other):
return Apply(ge, self, as_tohu_generator(other))
# Patch TohuBaseGenerator with the new methods
TohuBaseGenerator.__add__ = add_generators
TohuBaseGenerator.__radd__ = radd_generators
TohuBaseGenerator.__mul__ = mul_generators
TohuBaseGenerator.__rmul__ = rmul_generators
TohuBaseGenerator.__eq__ = eq_generators
TohuBaseGenerator.__lt__ = lt_generators
TohuBaseGenerator.__le__ = le_generators
TohuBaseGenerator.__gt__ = gt_generators
TohuBaseGenerator.__ge__ = ge_generators
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
|
<commit_before>"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from operator import add, mul, gt, ge, lt, le, eq
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation
from .derived_generators import GetAttribute
__all__ = []
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
<commit_msg>Set special methods on TohuBaseGenerator to allow e.g. adding two generators<commit_after>"""
This module is not meant to be imported directly.
Its purpose is to patch the TohuBaseGenerator class
so that its special methods __add__, __mul__ etc.
support other generators as arguments.
"""
from .base import TohuBaseGenerator
from .primitive_generators import GeoJSONGeolocation, as_tohu_generator
from .derived_generators import Apply, GetAttribute
from operator import add, mul, gt, ge, lt, le, eq
__all__ = []
def add_generators(self, other):
return Apply(add, self, as_tohu_generator(other))
def radd_generators(self, other):
return Apply(add, as_tohu_generator(other), self)
def mul_generators(self, other):
return Apply(mul, self, as_tohu_generator(other))
def rmul_generators(self, other):
return Apply(mul, as_tohu_generator(other), self)
def eq_generators(self, other):
return Apply(eq, self, as_tohu_generator(other))
def lt_generators(self, other):
return Apply(lt, self, as_tohu_generator(other))
def le_generators(self, other):
return Apply(le, self, as_tohu_generator(other))
def gt_generators(self, other):
return Apply(gt, self, as_tohu_generator(other))
def ge_generators(self, other):
return Apply(ge, self, as_tohu_generator(other))
# Patch TohuBaseGenerator with the new methods
TohuBaseGenerator.__add__ = add_generators
TohuBaseGenerator.__radd__ = radd_generators
TohuBaseGenerator.__mul__ = mul_generators
TohuBaseGenerator.__rmul__ = rmul_generators
TohuBaseGenerator.__eq__ = eq_generators
TohuBaseGenerator.__lt__ = lt_generators
TohuBaseGenerator.__le__ = le_generators
TohuBaseGenerator.__gt__ = gt_generators
TohuBaseGenerator.__ge__ = ge_generators
def split_geolocation(self):
attributes = ['lon', 'lat'] + self.include_attributes
return tuple(GetAttribute(self, attr_name) for attr_name in attributes)
GeoJSONGeolocation.split = split_geolocation
|
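Each patched dunder above defers the actual arithmetic: instead of computing a value, it returns an Apply node wrapping both operands, with plain values first lifted through as_tohu_generator. A self-contained sketch of that pattern with stand-in classes (illustrative substitutes, not tohu's real implementations):

from operator import add

class BaseGen:                        # stand-in for TohuBaseGenerator
    def __next__(self):
        raise NotImplementedError

class Constant(BaseGen):              # stand-in for lifting a plain value
    def __init__(self, value):
        self.value = value
    def __next__(self):
        return self.value

class Apply(BaseGen):                 # stand-in for tohu's Apply node
    def __init__(self, fn, *gens):
        self.fn, self.gens = fn, gens
    def __next__(self):
        return self.fn(*(next(g) for g in self.gens))

def as_gen(x):
    return x if isinstance(x, BaseGen) else Constant(x)

# The same monkey-patching move as in the module above.
BaseGen.__add__ = lambda self, other: Apply(add, self, as_gen(other))
BaseGen.__radd__ = lambda self, other: Apply(add, as_gen(other), self)

g = Constant(40) + 2                  # builds a lazy Apply node; nothing runs yet
print(next(g))                        # -> 42, evaluated on demand

Note that __eq__ receives the same treatment, so comparing two generators also yields a lazy Apply node rather than a bool — a deliberate trade-off that keeps generator expressions composable.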
abd7987e698e9102a7d737f3b32296d703ae0a7c
|
scripts/buildAll.py
|
scripts/buildAll.py
|
#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
args = ['-e', 'clean']
def mvn(*args):
print("Running " )
print(args)
print(" in " + os.getcwd())
mvnLocation = "C:\\Program Files\\Maven\\bin\\mvn.bat"
return print("Output: " + subprocess.check_call([mvnLocation] + list(args)))
for project in projects:
if os.path.isdir(os.getcwd() + project)== False:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
print("Enviorment: " + os.environ['JAVA_HOME'])
os.chdir(project)
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
if(len(sys.argv) > 1):
if(str(sys.argv[1]) == 'skipTest'):
args.extend(['-DskipTests'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
else:
print(project + " folder does not exists in " + os.getcwd())
|
#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-isaac-parent',
'va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
defaultArgs = ['-e', 'clean']
def mvn(args):
return subprocess.check_call(['mvn'] + args)
for project in projects:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
os.chdir(project)
args = defaultArgs[:]
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
|
Revert "Fixed a windows bug that prevented run locating the Maven batch file to run mvn commands. This will need to be modified to run on Linux"
|
Revert "Fixed a windows bug that prevented run locating the Maven batch file to run mvn commands. This will need to be modified to run on Linux"
This reverts commit b9dfad50415d6f7d90d66da30a1a55cbe62a07ef.
|
Python
|
apache-2.0
|
Apelon-VA/va-isaac-docs
|
#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
args = ['-e', 'clean']
def mvn(*args):
print("Running " )
print(args)
print(" in " + os.getcwd())
mvnLocation = "C:\\Program Files\\Maven\\bin\\mvn.bat"
return print("Output: " + subprocess.check_call([mvnLocation] + list(args)))
for project in projects:
if os.path.isdir(os.getcwd() + project)== False:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
print("Enviorment: " + os.environ['JAVA_HOME'])
os.chdir(project)
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
if(len(sys.argv) > 1):
if(str(sys.argv[1]) == 'skipTest'):
args.extend(['-DskipTests'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
else:
print(project + " folder does not exists in " + os.getcwd())
Revert "Fixed a windows bug that prevented run locating the Maven batch file to run mvn commands. This will need to be modified to run on Linux"
This reverts commit b9dfad50415d6f7d90d66da30a1a55cbe62a07ef.
|
#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-isaac-parent',
'va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
defaultArgs = ['-e', 'clean']
def mvn(args):
return subprocess.check_call(['mvn'] + args)
for project in projects:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
os.chdir(project)
args = defaultArgs[:]
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
|
<commit_before>#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
args = ['-e', 'clean']
def mvn(*args):
print("Running " )
print(args)
print(" in " + os.getcwd())
mvnLocation = "C:\\Program Files\\Maven\\bin\\mvn.bat"
return print("Output: " + subprocess.check_call([mvnLocation] + list(args)))
for project in projects:
if os.path.isdir(os.getcwd() + project)== False:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
print("Enviorment: " + os.environ['JAVA_HOME'])
os.chdir(project)
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
if(len(sys.argv) > 1):
if(str(sys.argv[1]) == 'skipTest'):
args.extend(['-DskipTests'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
else:
print(project + " folder does not exists in " + os.getcwd())
<commit_msg>Revert "Fixed a windows bug that prevented run locating the Maven batch file to run mvn commands. This will need to be modified to run on Linux"
This reverts commit b9dfad50415d6f7d90d66da30a1a55cbe62a07ef.<commit_after>
|
#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-isaac-parent',
'va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
defaultArgs = ['-e', 'clean']
def mvn(args):
return subprocess.check_call(['mvn'] + args)
for project in projects:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
os.chdir(project)
args = defaultArgs[:]
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
|
#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
args = ['-e', 'clean']
def mvn(*args):
print("Running " )
print(args)
print(" in " + os.getcwd())
mvnLocation = "C:\\Program Files\\Maven\\bin\\mvn.bat"
return print("Output: " + subprocess.check_call([mvnLocation] + list(args)))
for project in projects:
if os.path.isdir(os.getcwd() + project)== False:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
print("Enviorment: " + os.environ['JAVA_HOME'])
os.chdir(project)
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
if(len(sys.argv) > 1):
if(str(sys.argv[1]) == 'skipTest'):
args.extend(['-DskipTests'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
else:
print(project + " folder does not exists in " + os.getcwd())
Revert "Fixed a windows bug that prevented run locating the Maven batch file to run mvn commands. This will need to be modified to run on Linux"
This reverts commit b9dfad50415d6f7d90d66da30a1a55cbe62a07ef.#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-isaac-parent',
'va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
defaultArgs = ['-e', 'clean']
def mvn(args):
return subprocess.check_call(['mvn'] + args)
for project in projects:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
os.chdir(project)
args = defaultArgs[:]
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
|
<commit_before>#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
args = ['-e', 'clean']
def mvn(*args):
print("Running " )
print(args)
print(" in " + os.getcwd())
mvnLocation = "C:\\Program Files\\Maven\\bin\\mvn.bat"
return print("Output: " + subprocess.check_call([mvnLocation] + list(args)))
for project in projects:
if os.path.isdir(os.getcwd() + project)== False:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
print("Enviorment: " + os.environ['JAVA_HOME'])
os.chdir(project)
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
if(len(sys.argv) > 1):
if(str(sys.argv[1]) == 'skipTest'):
args.extend(['-DskipTests'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
else:
print(project + " folder does not exists in " + os.getcwd())
<commit_msg>Revert "Fixed a windows bug that prevented run locating the Maven batch file to run mvn commands. This will need to be modified to run on Linux"
This reverts commit b9dfad50415d6f7d90d66da30a1a55cbe62a07ef.<commit_after>#! /usr/bin/python
#
# Build the entire ISAAC Project
#
#
#
import subprocess
import os
import sys
projects = ['va-isaac-parent',
'va-ochre',
'va-isaac-metadata',
'va-isaac-mojo',
'va-newtons-cradle',
'va-logic',
'va-query-service',
'va-isaac-gui',
'va-solor-goods',
'va-expression-service',
'va-isaac-gui-pa']
defaultArgs = ['-e', 'clean']
def mvn(args):
return subprocess.check_call(['mvn'] + args)
for project in projects:
cwd = os.getcwd()
print("In: " + cwd + " Entering project " + project)
os.chdir(project)
args = defaultArgs[:]
if project == 'va-expression-service' or project == 'va-isaac-gui-pa':
args.extend(['package'])
else:
args.extend(['install'])
print ("Build Argument")
print (args)
#This fails the build, if it results in a non-0 exit status
mvn(args)
os.chdir(os.pardir)
|
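Both versions lean on subprocess.check_call to abort the script when Maven fails: it returns 0 on success and raises CalledProcessError on any non-zero exit, which is what the "#This fails the build" comment refers to. (The reverted Windows variant also tried to concatenate that integer return value into a string, which would raise a TypeError even on success.) A small runnable demonstration:

import subprocess
import sys

# Success: check_call returns 0 and execution continues.
rc = subprocess.check_call([sys.executable, "-c", "print('build ok')"])
print("returned:", rc)

# Failure: a non-zero exit raises, which stops a build script like the one above.
try:
    subprocess.check_call([sys.executable, "-c", "import sys; sys.exit(2)"])
except subprocess.CalledProcessError as err:
    print("build step failed with exit code", err.returncode)

The restored loop also re-copies defaultArgs[:] on every iteration, so one project's 'install' or 'package' goal no longer accumulates into the argument list of the next.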
62d7924f6f5097845a21408e975cae1dfff01c1c
|
android/app/src/main/assets/python/enamlnative/widgets/analog_clock.py
|
android/app/src/main/assets/python/enamlnative/widgets/analog_clock.py
|
'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .text_view import TextView, ProxyTextView
class ProxyAnalogClock(ProxyTextView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(TextView):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
|
'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .view import View, ProxyView
class ProxyAnalogClock(ProxyView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(View):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
|
Use correct parent class for clock
|
Use correct parent class for clock
|
Python
|
mit
|
codelv/enaml-native,codelv/enaml-native,codelv/enaml-native,codelv/enaml-native
|
'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .text_view import TextView, ProxyTextView
class ProxyAnalogClock(ProxyTextView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(TextView):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
Use correct parent class for clock
|
'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .view import View, ProxyView
class ProxyAnalogClock(ProxyView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(View):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
|
<commit_before>'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .text_view import TextView, ProxyTextView
class ProxyAnalogClock(ProxyTextView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(TextView):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
<commit_msg>Use correct parent class for clock<commit_after>
|
'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .view import View, ProxyView
class ProxyAnalogClock(ProxyView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(View):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
|
'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .text_view import TextView, ProxyTextView
class ProxyAnalogClock(ProxyTextView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(TextView):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
Use correct parent class for clock'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .view import View, ProxyView
class ProxyAnalogClock(ProxyView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(View):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
|
<commit_before>'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .text_view import TextView, ProxyTextView
class ProxyAnalogClock(ProxyTextView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(TextView):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
<commit_msg>Use correct parent class for clock<commit_after>'''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on May 20, 2017
@author: jrm
'''
from atom.api import (
Typed, ForwardTyped, Unicode, observe
)
from enaml.core.declarative import d_
from .view import View, ProxyView
class ProxyAnalogClock(ProxyView):
""" The abstract definition of a proxy AnalogClock object.
"""
#: A reference to the AnalogClock declaration.
declaration = ForwardTyped(lambda: AnalogClock)
class AnalogClock(View):
""" A simple control for displaying an AnalogClock
"""
#: A reference to the proxy object.
proxy = Typed(ProxyAnalogClock)
|
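Both revisions declare the back-reference with ForwardTyped(lambda: AnalogClock); wrapping the name in a lambda defers the lookup until the attribute is first resolved, so the proxy class can refer to a class defined further down the module. A toy descriptor illustrating the idea in plain Python (this is a sketch of the pattern, not atom's actual implementation):

def forward_typed(resolver):
    class _Forward:
        def __get__(self, obj, objtype=None):
            return resolver()         # name resolved lazily, at access time
    return _Forward()

class ProxyClock:
    declaration = forward_typed(lambda: Clock)   # Clock does not exist yet

class Clock:                                      # defined afterwards
    pass

print(ProxyClock.declaration is Clock)            # -> True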
634f718aa4fe4052a8dc9be1f82078ebcd2338df
|
build-release.py
|
build-release.py
|
import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
def set_version_file(version):
print "Working on VERSION file"
with open("VERSION", "w") as f:
f.write(version)
set_verion_file(NEW_VERSION)
set_assemblyinfo_version(NEW_VERSION)
|
import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
set_assemblyinfo_version(NEW_VERSION)
|
Revert "working on version file"
|
Revert "working on version file"
This reverts commit cb159cd3d907aeaa65f6a293d95a0aa7d7f2fee8.
|
Python
|
mit
|
psistats/windows-client,psistats/windows-client,psistats/windows-client
|
import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
def set_version_file(version):
print "Working on VERSION file"
with open("VERSION", "w") as f:
f.write(version)
set_verion_file(NEW_VERSION)
set_assemblyinfo_version(NEW_VERSION)Revert "working on version file"
This reverts commit cb159cd3d907aeaa65f6a293d95a0aa7d7f2fee8.
|
import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
set_assemblyinfo_version(NEW_VERSION)
|
<commit_before>import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
def set_version_file(version):
print "Working on VERSION file"
with open("VERSION", "w") as f:
f.write(version)
set_verion_file(NEW_VERSION)
set_assemblyinfo_version(NEW_VERSION)<commit_msg>Revert "working on version file"
This reverts commit cb159cd3d907aeaa65f6a293d95a0aa7d7f2fee8.<commit_after>
|
import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
set_assemblyinfo_version(NEW_VERSION)
|
import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
def set_version_file(version):
print "Working on VERSION file"
with open("VERSION", "w") as f:
f.write(version)
set_verion_file(NEW_VERSION)
set_assemblyinfo_version(NEW_VERSION)Revert "working on version file"
This reverts commit cb159cd3d907aeaa65f6a293d95a0aa7d7f2fee8.import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
set_assemblyinfo_version(NEW_VERSION)
|
<commit_before>import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
def set_version_file(version):
print "Working on VERSION file"
with open("VERSION", "w") as f:
f.write(version)
set_verion_file(NEW_VERSION)
set_assemblyinfo_version(NEW_VERSION)<commit_msg>Revert "working on version file"
This reverts commit cb159cd3d907aeaa65f6a293d95a0aa7d7f2fee8.<commit_after>import sys
import glob
import re
NEW_VERSION = sys.argv[1]
with open('VERSION') as f:
VERSION=f.read()
print NEW_VERSION
print VERSION
def set_assemblyinfo_version(version):
aLineRe = "AssemblyVersion|AssemblyFileVersion\(\"([\.0-9]+)\"\)"
aVersionRe = "(\d\.\d\.\d)"
print "Changing version numbers in AssemblyInfo.cs files"
for name in glob.glob("./*/Properties/AssemblyInfo.cs"):
print "Working on " + name
new_file = []
with open(name) as f:
for line in f.readlines():
if line.startswith("//") != True:
reProg = re.compile(aLineRe)
result = reProg.search(line)
if (result != None):
line = re.sub(aVersionRe, NEW_VERSION, line)
new_file.append(line)
with open(name, "w") as f:
f.write("".join(new_file))
set_assemblyinfo_version(NEW_VERSION)
|
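The surviving set_assemblyinfo_version relies on two regexes: aLineRe to select lines carrying a version attribute and aVersionRe to rewrite the number itself. A runnable illustration on a sample line (the input line is made up; the patterns are the script's own):

import re

a_line_re = r'AssemblyVersion|AssemblyFileVersion\("([\.0-9]+)"\)'
a_version_re = r'(\d\.\d\.\d)'
line = '[assembly: AssemblyVersion("1.2.3")]'   # sample input, not from the repo

if re.search(a_line_re, line):                  # matches via the attribute name
    print(re.sub(a_version_re, '2.0.0', line))  # -> [assembly: AssemblyVersion("2.0.0")]

Two quirks worth noting: the alternation in aLineRe binds loosely, so the pattern effectively matches on the bare word AssemblyVersion; and aVersionRe only covers single-digit components, so a version such as 1.10.3 would not match and the line would be left unchanged.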
d4c5e7a9d9b6fb795c5a16cf6a7d12f5ec32b160
|
peas-demo/plugins/pythonhello/pythonhello.py
|
peas-demo/plugins/pythonhello/pythonhello.py
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import PeasUI
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
class PythonHelloConfigurable(gobject.GObject, PeasUI.Configurable):
__gtype_name__ = 'PythonHelloConfigurable'
def do_create_configure_widget(self):
return Gtk.Label.new("Python Hello configure widget")
|
Add a configure dialog to the python plugin.
|
peas-demo: Add a configure dialog to the python plugin.
|
Python
|
lgpl-2.1
|
chergert/libpeas,chergert/libpeas,GNOME/libpeas,Distrotech/libpeas,GNOME/libpeas,Distrotech/libpeas,gregier/libpeas,Distrotech/libpeas,gregier/libpeas,gregier/libpeas,chergert/libpeas,gregier/libpeas
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
peas-demo: Add a configure dialog to the python plugin.
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import PeasUI
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
class PythonHelloConfigurable(gobject.GObject, PeasUI.Configurable):
__gtype_name__ = 'PythonHelloConfigurable'
def do_create_configure_widget(self):
return Gtk.Label.new("Python Hello configure widget")
|
<commit_before># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
<commit_msg>peas-demo: Add a configure dialog to the python plugin.<commit_after>
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import PeasUI
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
class PythonHelloConfigurable(gobject.GObject, PeasUI.Configurable):
__gtype_name__ = 'PythonHelloConfigurable'
def do_create_configure_widget(self):
return Gtk.Label.new("Python Hello configure widget")
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
peas-demo: Add a configure dialog to the python plugin.# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import PeasUI
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
class PythonHelloConfigurable(gobject.GObject, PeasUI.Configurable):
__gtype_name__ = 'PythonHelloConfigurable'
def do_create_configure_widget(self):
return Gtk.Label.new("Python Hello configure widget")
|
<commit_before># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
<commit_msg>peas-demo: Add a configure dialog to the python plugin.<commit_after># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
from gi.repository import Peas
from gi.repository import PeasUI
from gi.repository import Gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(gobject.GObject, Peas.Activatable):
__gtype_name__ = 'PythonHelloPlugin'
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = Gtk.Label()
window._pythonhello_label.set_text(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label, True, True, 0)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
def do_update_state(self, window):
print "PythonHelloPlugin.do_update_state", repr(window)
class PythonHelloConfigurable(gobject.GObject, PeasUI.Configurable):
__gtype_name__ = 'PythonHelloConfigurable'
def do_create_configure_widget(self):
return Gtk.Label.new("Python Hello configure widget")
|
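Adding PythonHelloConfigurable as a second class works because the engine instantiates, per extension point, whichever class in the plugin module implements the corresponding interface — Peas.Activatable for the window hook, PeasUI.Configurable for the configure dialog. A toy emulation of that discovery step in plain Python (libpeas itself does this through GObject type introspection, not issubclass):

class Activatable: pass          # stand-ins for the Peas interfaces
class Configurable: pass

class HelloPlugin(Activatable):
    def do_activate(self):
        print("activated")

class HelloConfigurable(Configurable):
    def do_create_configure_widget(self):
        return "configure widget"

def create_extension(module_classes, interface):
    # Return an instance of the first class implementing the interface.
    for cls in module_classes:
        if issubclass(cls, interface):
            return cls()

plugin_classes = [HelloPlugin, HelloConfigurable]
create_extension(plugin_classes, Activatable).do_activate()
print(create_extension(plugin_classes, Configurable).do_create_configure_widget())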
aeafebbb2bb5ddf4e2d2ddd47cd16d8ed515ac1b
|
portal/models.py
|
portal/models.py
|
from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
# ToDo: Make username unique
username = models.CharField(max_length=7)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
|
from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
username = models.CharField(max_length=7,unique=True)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
|
Enforce unique user names in the database model
|
Enforce unique user names in the database model
Set unique=TRUE and deleted TODO comment line
|
Python
|
mit
|
martinzlocha/mad,martinzlocha/mad,martinzlocha/mad
|
from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
# ToDo: Make username unique
username = models.CharField(max_length=7)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
Enforce unique user names in the database model
Set unique=True and removed the TODO comment line
|
from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
username = models.CharField(max_length=7,unique=True)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
|
<commit_before>from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
# ToDo: Make username unique
username = models.CharField(max_length=7)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
<commit_msg>Enforce unique user names in the database model
Set unique=True and removed the TODO comment line<commit_after>
|
from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
username = models.CharField(max_length=7,unique=True)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
|
from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
# ToDo: Make username unique
username = models.CharField(max_length=7)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
Enforce unique user names in the database model
Set unique=True and removed the TODO comment linefrom django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
username = models.CharField(max_length=7,unique=True)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
|
<commit_before>from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
# ToDo: Make username unique
username = models.CharField(max_length=7)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
<commit_msg>Enforce unique user names in the database model
Set unique=True and removed the TODO comment line<commit_after>from django.db import models
from common.util.generator import get_random_id
class Student(models.Model):
username = models.CharField(max_length=7,unique=True)
magic_id = models.CharField(max_length=8)
child = models.BooleanField()
def __str__(self):
return self.username
def save(self, *args, **kwargs):
if len(self.magic_id) == 0:
self.magic_id = get_random_id()
super(Student, self).save(*args, **kwargs)
def get_new_student_popup(self):
message = "Email has been sent to %s@ic.ac.uk. Go activate your account." % self.username
return {'message': message, 'state': 'success'}
def get_existing_student_popup(self):
message = "Account already exists. Activation email re-sent to %s@ic.ac.uk." % self.username
return {'message': message, 'state': 'warning'}
|
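unique=True is enforced as a UNIQUE constraint in the generated schema, so the database itself rejects a second row with the same username rather than relying on application-level checks. A standalone sqlite3 illustration of the constraint Django would emit for this field (the schema is handwritten here, not generated by Django):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE student (username VARCHAR(7) UNIQUE, magic_id VARCHAR(8))")
conn.execute("INSERT INTO student VALUES ('abc1234', 'deadbeef')")
try:
    conn.execute("INSERT INTO student VALUES ('abc1234', 'cafebabe')")  # duplicate
except sqlite3.IntegrityError as err:
    print("rejected duplicate username:", err)  # UNIQUE constraint failed: student.username

One migration caveat: if the table already contains duplicate usernames, adding the constraint will fail until those rows are cleaned up.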
f38eb25fe13320297baad173c8e6d6ac7cfb9542
|
spacy/tests/tokens/test_vec.py
|
spacy/tests/tokens/test_vec.py
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
|
Fix test for word vector
|
Fix test for word vector
|
Python
|
mit
|
oroszgy/spaCy.hu,recognai/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,banglakit/spaCy,explosion/spaCy,explosion/spaCy,raphael0202/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,banglakit/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,honnibal/spaCy,raphael0202/spaCy,explosion/spaCy,raphael0202/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,banglakit/spaCy,raphael0202/spaCy,banglakit/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,spacy-io/spaCy,spacy-io/spaCy
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
Fix test for word vector
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
|
<commit_before>from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
<commit_msg>Fix test for word vector<commit_after>
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
Fix test for word vector
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
|
<commit_before>from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
<commit_msg>Fix test for word vector<commit_after>from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
|
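Both versions of the test pin the first vector component to a narrow band with two chained comparisons; the commit only moves that band from roughly 0.075 to roughly -0.75 to match retrained vectors. A tolerance-based restatement of the same check, with illustrative values rather than spaCy's own:

import numpy as np

def assert_first_component(vector, expected=-0.75, tol=0.05):
    # Approximately equivalent to `expected + tol >= vector[0] > expected - tol`,
    # but with the tolerance stated explicitly.
    assert np.isclose(vector[0], expected, atol=tol), vector[0]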
e81c56e1f3b682e0cfffa40851aed817be3b1812
|
etcd3/__init__.py
|
etcd3/__init__.py
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Lease',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
|
Add Lease to public api
|
Add Lease to public api
|
Python
|
apache-2.0
|
kragniz/python-etcd3
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
Add Lease to public api
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Lease',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
|
<commit_before>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
<commit_msg>Add Lease to public api<commit_after>
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Lease',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
Add Lease to public api
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Lease',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
|
<commit_before>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
<commit_msg>Add Lease to public api<commit_after>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Lease',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
|
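A sketch of what the re-export enables. This assumes a running etcd server on the default endpoint and that Etcd3Client exposes lease()/revoke() as python-etcd3 did around this version; treat the calls as illustrative:

import etcd3

client = etcd3.client()        # connects to localhost:2379 by default
lease = client.lease(ttl=10)   # acquire a 10-second lease
# Lease is now importable from the package root, so type checks need no
# knowledge of the etcd3.leases submodule.
assert isinstance(lease, etcd3.Lease)
lease.revoke()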
00a7f13ac2dbbd7449fd0ce260a21448c67b73e9
|
birdwatch/api.py
|
birdwatch/api.py
|
__author__ = 'jloeffler'
def list_projects():
return {}
def list_contributors():
return {}
|
__author__ = 'jloeffler'
from github3 import GitHub
from github3.models import GitHubError
from github3.repos.repo import Repository
from birdwatch.configuration import configuration
from birdwatch.collector import Project
def list_projects():
# just for testing
github = GitHub(token=configuration.github_token)
zalando_repos = github.iter_user_repos("zalando")
projects = {'projects': [{'name': repo.name} for repo in zalando_repos if repo.name == 'birdwatch']}
print(projects)
return projects
def list_contributors():
return {}
|
Return one project for testing
|
Return one project for testing
|
Python
|
apache-2.0
|
marky-mark/catwatch,AlexanderYastrebov/catwatch,AlexanderYastrebov/catwatch,marky-mark/catwatch,AlexanderYastrebov/catwatch,marky-mark/catwatch,AlexanderYastrebov/catwatch,marky-mark/catwatch
|
__author__ = 'jloeffler'
def list_projects():
return {}
def list_contributors():
return {}
Return one project for testing
|
__author__ = 'jloeffler'
from github3 import GitHub
from github3.models import GitHubError
from github3.repos.repo import Repository
from birdwatch.configuration import configuration
from birdwatch.collector import Project
def list_projects():
# just for testing
github = GitHub(token=configuration.github_token)
zalando_repos = github.iter_user_repos("zalando")
projects = {'projects': [{'name': repo.name} for repo in zalando_repos if repo.name == 'birdwatch']}
print(projects)
return projects
def list_contributors():
return {}
|
<commit_before>__author__ = 'jloeffler'
def list_projects():
return {}
def list_contributors():
return {}
<commit_msg>Return one project for testing<commit_after>
|
__author__ = 'jloeffler'
from github3 import GitHub
from github3.models import GitHubError
from github3.repos.repo import Repository
from birdwatch.configuration import configuration
from birdwatch.collector import Project
def list_projects():
# just for testing
github = GitHub(token=configuration.github_token)
zalando_repos = github.iter_user_repos("zalando")
projects = {'projects': [{'name': repo.name} for repo in zalando_repos if repo.name == 'birdwatch']}
print(projects)
return projects
def list_contributors():
return {}
|
__author__ = 'jloeffler'
def list_projects():
return {}
def list_contributors():
return {}
Return one project for testing
__author__ = 'jloeffler'
from github3 import GitHub
from github3.models import GitHubError
from github3.repos.repo import Repository
from birdwatch.configuration import configuration
from birdwatch.collector import Project
def list_projects():
# just for testing
github = GitHub(token=configuration.github_token)
zalando_repos = github.iter_user_repos("zalando")
projects = {'projects': [{'name': repo.name} for repo in zalando_repos if repo.name == 'birdwatch']}
print(projects)
return projects
def list_contributors():
return {}
|
<commit_before>__author__ = 'jloeffler'
def list_projects():
return {}
def list_contributors():
return {}
<commit_msg>Return one project for testing<commit_after>__author__ = 'jloeffler'
from github3 import GitHub
from github3.models import GitHubError
from github3.repos.repo import Repository
from birdwatch.configuration import configuration
from birdwatch.collector import Project
def list_projects():
# just for testing
github = GitHub(token=configuration.github_token)
zalando_repos = github.iter_user_repos("zalando")
projects = {'projects': [{'name': repo.name} for repo in zalando_repos if repo.name == 'birdwatch']}
print(projects)
return projects
def list_contributors():
return {}
|
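The list comprehension in list_projects() is the whole of the logic; isolated from github3 with stand-in objects it looks like the following (Repo and the repo names are fabricated for the sketch):

from collections import namedtuple

Repo = namedtuple('Repo', 'name')
zalando_repos = [Repo('birdwatch'), Repo('zalando.github.io')]
projects = {'projects': [{'name': r.name} for r in zalando_repos
                         if r.name == 'birdwatch']}
assert projects == {'projects': [{'name': 'birdwatch'}]}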
36d3c2f81ea39968bc58bab172e6bf035147ae3c
|
mpld3/test_plots/test_logscale.py
|
mpld3/test_plots/test_logscale.py
|
"""Plot to test logscale"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
"""
Plot to test logscale
TODO (@vladh): `sharex` and `sharey` seem to cause the tick labels to go nuts. This needs to
be fixed.
"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
Add TODO to broken test
|
Add TODO to broken test
|
Python
|
bsd-3-clause
|
mpld3/mpld3,jakevdp/mpld3,jakevdp/mpld3,mpld3/mpld3
|
"""Plot to test logscale"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
Add TODO to broken test
|
"""
Plot to test logscale
TODO (@vladh): `sharex` and `sharey` seem to cause the tick labels to go nuts. This needs to
be fixed.
"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
<commit_before>"""Plot to test logscale"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
<commit_msg>Add TODO to broken test<commit_after>
|
"""
Plot to test logscale
TODO (@vladh): `sharex` and `sharey` seem to cause the tick labels to go nuts. This needs to
be fixed.
"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
"""Plot to test logscale"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
Add TODO to broken test
"""
Plot to test logscale
TODO (@vladh): `sharex` and `sharey` seem to cause the tick labels to go nuts. This needs to
be fixed.
"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
<commit_before>"""Plot to test logscale"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
<commit_msg>Add TODO to broken test<commit_after>"""
Plot to test logscale
TODO (@vladh): `sharex` and `sharey` seem to cause the tick labels to go nuts. This needs to
be fixed.
"""
import matplotlib.pyplot as plt
import numpy as np
import mpld3
def create_plot():
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2, sharey=ax1, xscale='log')
ax3 = fig.add_subplot(2, 2, 3, sharex=ax1, yscale='log')
ax4 = fig.add_subplot(2, 2, 4, sharex=ax2, sharey=ax3)
x = np.linspace(1, 1e2)
y = x ** 2
for ax in [ax1, ax2, ax3, ax4]:
ax.plot(x, y)
return fig
def test_logscale():
fig = create_plot()
html = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot())
|
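A stripped-down reproduction of the axis-sharing plus log-scale combination the new TODO points at, using matplotlib alone; a sketch like this helps check whether the tick-label issue lives in mpld3 or upstream:

import matplotlib.pyplot as plt
import numpy as np

fig, (ax1, ax2) = plt.subplots(1, 2, sharey=True)  # shared y, like ax1/ax2 above
ax2.set_xscale('log')                              # log x on one shared axis
x = np.linspace(1, 1e2)
for ax in (ax1, ax2):
    ax.plot(x, x ** 2)
plt.close(fig)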
692f13b9dbe994baf44bf42384e956608b94fede
|
aldryn_apphooks_config/utils.py
|
aldryn_apphooks_config/utils.py
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if request.current_page:
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
Add better check for being in a CMS-page request
|
Add better check for being in a CMS-page request
|
Python
|
bsd-3-clause
|
aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if request.current_page:
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
Add better check for being in a CMS-page request
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
<commit_before># -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if request.current_page:
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
<commit_msg>Add better check for being in a CMS-page request<commit_after>
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if request.current_page:
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
Add better check for being in a CMS-page request
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
<commit_before># -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if request.current_page:
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
<commit_msg>Add better check for being in a CMS-page request<commit_after># -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
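The substance of the fix is swapping direct attribute access for getattr with a default, so requests that never passed through the CMS middleware (and therefore have no current_page attribute at all) fall through cleanly instead of raising. A dependency-free illustration; FakeRequest is invented for the sketch:

class FakeRequest:
    pass  # no current_page attribute, like a non-CMS request

request = FakeRequest()
assert getattr(request, 'current_page', None) is None  # new code: safe default
try:
    request.current_page                               # old code: raises
except AttributeError:
    pass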
829ad434f42b457294d44108b26c6880cd0e4c36
|
pymatgen/__init__.py
|
pymatgen/__init__.py
|
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
|
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
|
Remove zopen in pymatgen root.
|
Remove zopen in pymatgen root.
Former-commit-id: 375be0147716d3b4d2dee95680eae4ee3804716b [formerly 05648421c1fa77f6f339f68be2c43bb7952e918a]
Former-commit-id: e5cfaf0277815951ddb09d9d6b30876e400870d7
|
Python
|
mit
|
dongsenfo/pymatgen,Bismarrck/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,tallakahath/pymatgen,mbkumar/pymatgen,davidwaroquiers/pymatgen,ndardenne/pymatgen,montoyjh/pymatgen,xhqu1981/pymatgen,vorwerkc/pymatgen,matk86/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,tallakahath/pymatgen,montoyjh/pymatgen,mbkumar/pymatgen,dongsenfo/pymatgen,ndardenne/pymatgen,dongsenfo/pymatgen,setten/pymatgen,mbkumar/pymatgen,gpetretto/pymatgen,nisse3000/pymatgen,Bismarrck/pymatgen,gVallverdu/pymatgen,tallakahath/pymatgen,gpetretto/pymatgen,richardtran415/pymatgen,matk86/pymatgen,johnson1228/pymatgen,czhengsci/pymatgen,matk86/pymatgen,gVallverdu/pymatgen,davidwaroquiers/pymatgen,nisse3000/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,Bismarrck/pymatgen,mbkumar/pymatgen,tschaume/pymatgen,aykol/pymatgen,gmatteo/pymatgen,setten/pymatgen,vorwerkc/pymatgen,czhengsci/pymatgen,blondegeek/pymatgen,aykol/pymatgen,aykol/pymatgen,nisse3000/pymatgen,xhqu1981/pymatgen,richardtran415/pymatgen,gVallverdu/pymatgen,tschaume/pymatgen,setten/pymatgen,fraricci/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,setten/pymatgen,czhengsci/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,xhqu1981/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,dongsenfo/pymatgen,matk86/pymatgen,blondegeek/pymatgen,tschaume/pymatgen,gpetretto/pymatgen,nisse3000/pymatgen,davidwaroquiers/pymatgen,montoyjh/pymatgen,Bismarrck/pymatgen,davidwaroquiers/pymatgen
|
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
Remove zopen in pymatgen root.
Former-commit-id: 375be0147716d3b4d2dee95680eae4ee3804716b [formerly 05648421c1fa77f6f339f68be2c43bb7952e918a]
Former-commit-id: e5cfaf0277815951ddb09d9d6b30876e400870d7
|
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
|
<commit_before>__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
<commit_msg>Remove zopen in pymatgen root.
Former-commit-id: 375be0147716d3b4d2dee95680eae4ee3804716b [formerly 05648421c1fa77f6f339f68be2c43bb7952e918a]
Former-commit-id: e5cfaf0277815951ddb09d9d6b30876e400870d7<commit_after>
|
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
|
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
Remove zopen in pymatgen root.
Former-commit-id: 375be0147716d3b4d2dee95680eae4ee3804716b [formerly 05648421c1fa77f6f339f68be2c43bb7952e918a]
Former-commit-id: e5cfaf0277815951ddb09d9d6b30876e400870d7
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
|
<commit_before>__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
<commit_msg>Remove zopen in pymatgen root.
Former-commit-id: 375be0147716d3b4d2dee95680eae4ee3804716b [formerly 05648421c1fa77f6f339f68be2c43bb7952e918a]
Former-commit-id: e5cfaf0277815951ddb09d9d6b30876e400870d7<commit_after>__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
|
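After the alias is removed, code that used the root-level name has to import from the owning module instead; the path below is the one the deleted line pointed at for the version in the record, and the file name is illustrative:

# before: from pymatgen import zopen
from pymatgen.util.io_utils import zopen

with zopen('POSCAR.gz') as f:  # zopen picks plain/gzip/bz2 open by extension
    contents = f.read()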
b800f746e73634fe04a9a1ec45ef62dd7528f219
|
comrade/utils.py
|
comrade/utils.py
|
import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==')
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
|
import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==').lower()
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
|
Return lowercased random tokens for consistency.
|
Return lowercased random tokens for consistency.
|
Python
|
mit
|
bueda/django-comrade
|
import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==')
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
Return lowercased random tokens for consistency.
|
import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==').lower()
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
|
<commit_before>import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==')
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
<commit_msg>Return lowercased random tokens for consistency.<commit_after>
|
import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==').lower()
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
|
import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==')
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
Return lowercased random tokens for consistency.
import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==').lower()
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
|
<commit_before>import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==')
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
<commit_msg>Return lowercased random tokens for consistency.<commit_after>import random
import hashlib
import base64
def generate_key():
key = hashlib.sha224(str(random.getrandbits(256))).digest()
key = base64.b64encode(key,
random.choice(['rA','aZ','gQ','hH','hG','aR','DD']))
key = key.rstrip('==').lower()
return key
def chunked(seq, n):
"""By Ned Batchelder.
Yield successive n-sized chunks from seq.
>>> for group in chunked(range(8), 3):
... print group
[0, 1, 2]
[3, 4, 5]
[6, 7]
"""
for i in xrange(0, len(seq), n):
yield seq[i:i+n]
def flatten(lst):
for elem in lst:
if hasattr(elem, '__iter__'):
for e in flatten(elem):
yield e
else:
yield elem
def find_dict_key(dictionary, search_value):
for key, value in dictionary.iteritems():
if value == search_value:
return key
def extract(dictionary, keys):
"""Returns a new dictionary with only the keys from the dictionary passed in
specified in the keys list.
"""
return dict((key, dictionary[key]) for key in keys if key in dictionary)
|
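A Python 3 restatement of the key pipeline showing what the added .lower() changes; the random altchars argument is dropped for brevity. Note also that rstrip('==') strips the same trailing characters as rstrip('='), since rstrip treats its argument as a set of characters:

import base64
import hashlib
import random

digest = hashlib.sha224(str(random.getrandbits(256)).encode()).digest()
raw = base64.b64encode(digest).decode().rstrip('=')  # mixed-case base64
key = raw.lower()                                    # the commit's change
print(raw, '->', key)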
0563882d0d1bfdf4e64a65bcd91e8d6d4ab6ed8f
|
core/polyaxon/polypod/compiler/lineage/artifacts_collector.py
|
core/polyaxon/polypod/compiler/lineage/artifacts_collector.py
|
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name),
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
|
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name) if name else "_",
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
|
Fix artifacts name sanitization for root folders
|
Fix artifacts name sanitization for root folders
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name),
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
Fix artifacts name sanitization for root folders
|
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name) if name else "_",
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
|
<commit_before>#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name),
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
<commit_msg>Fix artifacts name sanitization for root folders<commit_after>
|
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name) if name else "_",
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
|
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name),
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
Fix artifacts name sanitization for root folders
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name) if name else "_",
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
|
<commit_before>#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name),
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
<commit_msg>Fix artifacts name sanitization for root folders<commit_after>#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name) if name else "_",
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
|
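Why the root-folder fix above is needed, as a standalone sketch: for a path like "/", rstrip removes every character and os.path.basename returns an empty string, so without a fallback the lineage artifact would get an empty name. Standard library only; to_fqn_name is omitted here, assuming it matters only for non-empty names.
import os
def basename_or_default(artifact_path, default="_"):
    # "/".rstrip("/") == "" and os.path.basename("") == "", so root
    # folders would otherwise yield an empty artifact name.
    name = os.path.basename(artifact_path.rstrip("/"))
    return name if name else default
assert basename_or_default("outputs/model/") == "model"
assert basename_or_default("/") == "_"   # the case the patch guards
assert basename_or_default("") == "_"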
365da65390a0c2093fbbc5681c72cbfbd73ae78f
|
rctk/widgets/text.py
|
rctk/widgets/text.py
|
from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
|
from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20, **properties):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk, **properties)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
|
Allow additional properties on Text
|
Allow additional properties on Text
git-svn-id: ec97508af0aa29a1d296967d6f0ba22a468c79d6@350 286bb87c-ec97-11de-a004-2f18c49ebcc3
|
Python
|
bsd-2-clause
|
rctk/rctk,rctk/rctk
|
from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
Allow additional properties on Text
git-svn-id: ec97508af0aa29a1d296967d6f0ba22a468c79d6@350 286bb87c-ec97-11de-a004-2f18c49ebcc3
|
from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20, **properties):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk, **properties)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
|
<commit_before>from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
<commit_msg>Allow additional properties on Text
git-svn-id: ec97508af0aa29a1d296967d6f0ba22a468c79d6@350 286bb87c-ec97-11de-a004-2f18c49ebcc3<commit_after>
|
from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20, **properties):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk, **properties)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
|
from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
Allow additional properties on Text
git-svn-id: ec97508af0aa29a1d296967d6f0ba22a468c79d6@350 286bb87c-ec97-11de-a004-2f18c49ebcc3
from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20, **properties):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk, **properties)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
|
<commit_before>from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
<commit_msg>Allow additional properties on Text
git-svn-id: ec97508af0aa29a1d296967d6f0ba22a468c79d6@350 286bb87c-ec97-11de-a004-2f18c49ebcc3<commit_after>from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20, **properties):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk, **properties)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
|
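The rctk change above is the standard kwargs pass-through: accept extra keyword properties and forward them to the superclass so any Control-level property can be set at construction time. A self-contained sketch with stand-in classes (names are illustrative, not rctk's real API):
class StubControl:
    def __init__(self, tk, **properties):
        self.tk = tk
        self.properties = properties  # applied when the control is created
class StubText(StubControl):
    def __init__(self, tk, value="", rows=1, columns=20, **properties):
        self._value = value
        super(StubText, self).__init__(tk, **properties)  # extras now reach Control
t = StubText(tk=None, value="hi", visible=False, width=120)
assert t.properties == {"visible": False, "width": 120}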
5d1fe61d152d2c5544982322a9f156809ea267f0
|
main.py
|
main.py
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
Fix crashes from misc. events
|
Fix crashes from misc. events
|
Python
|
mit
|
Spferical/slack-megahal,Spferical/matrix-chatbot,Spferical/matrix-chatbot,Spferical/matrix-megahal
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
Fix crashes from misc. events
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
<commit_msg>Fix crashes from misc. events<commit_after>
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
Fix crashes from misc. events
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
<commit_msg>Fix crashes from misc. events<commit_after>from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
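The guard added above reflects how Slack's RTM stream behaves: it interleaves many event shapes (presence changes, typing notices, acks with no 'type', message subtypes with no 'text'), so keys must be checked before indexing. A small demonstration with invented sample events:
events = [
    {"type": "presence_change", "user": "U1"},
    {"reply_to": 1, "ok": True},                        # ack, no 'type'
    {"type": "message", "subtype": "message_deleted"},  # no 'text'
    {"type": "message", "text": "hello", "channel": "C1"},
]
handled = [e["text"] for e in events
           if "type" in e and e["type"] == "message" and "text" in e]
assert handled == ["hello"]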
152dfbb9fc5ca5fe5c859fea5ba4a25a31f3ff13
|
gn/compile_processors.py
|
gn/compile_processors.py
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
print("Recompiling " + p + "...")
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
|
Remove "Recompiling..." output when building .fp files
|
Remove "Recompiling..." output when building .fp files
Change-Id: I41402dc04d4388217d7f7cd8de9aff8fbb4a3765
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/317391
Reviewed-by: John Stiles <f4fcf42d3bb5924557f1eeb3be66747535e585da@google.com>
Commit-Queue: Brian Osman <794c0b5534edf5601d88e1d41975d0262da12894@google.com>
|
Python
|
bsd-3-clause
|
google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,google/skia,google/skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
print("Recompiling " + p + "...")
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
Remove "Recompiling..." output when building .fp files
Change-Id: I41402dc04d4388217d7f7cd8de9aff8fbb4a3765
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/317391
Reviewed-by: John Stiles <f4fcf42d3bb5924557f1eeb3be66747535e585da@google.com>
Commit-Queue: Brian Osman <794c0b5534edf5601d88e1d41975d0262da12894@google.com>
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
|
<commit_before>#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
print("Recompiling " + p + "...")
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
<commit_msg>Remove "Recompiling..." output when building .fp files
Change-Id: I41402dc04d4388217d7f7cd8de9aff8fbb4a3765
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/317391
Reviewed-by: John Stiles <f4fcf42d3bb5924557f1eeb3be66747535e585da@google.com>
Commit-Queue: Brian Osman <794c0b5534edf5601d88e1d41975d0262da12894@google.com><commit_after>
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
print("Recompiling " + p + "...")
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
Remove "Recompiling..." output when building .fp files
Change-Id: I41402dc04d4388217d7f7cd8de9aff8fbb4a3765
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/317391
Reviewed-by: John Stiles <f4fcf42d3bb5924557f1eeb3be66747535e585da@google.com>
Commit-Queue: Brian Osman <794c0b5534edf5601d88e1d41975d0262da12894@google.com>
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
|
<commit_before>#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
print("Recompiling " + p + "...")
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
<commit_msg>Remove "Recompiling..." output when building .fp files
Change-Id: I41402dc04d4388217d7f7cd8de9aff8fbb4a3765
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/317391
Reviewed-by: John Stiles <f4fcf42d3bb5924557f1eeb3be66747535e585da@google.com>
Commit-Queue: Brian Osman <794c0b5534edf5601d88e1d41975d0262da12894@google.com><commit_after>#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
|
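For reference, the path arithmetic the Skia script performs for each .fp processor, shown on a hypothetical input path (posixpath is used so the demo is separator-independent):
import posixpath
p = "src/gpu/effects/GrExample.fp"           # made-up processor file
noExt, _ = posixpath.splitext(p)
head, tail = posixpath.split(noExt)
target = posixpath.join(head, "generated", tail)
assert target == "src/gpu/effects/generated/GrExample"
# skslc then writes target + ".h" and target + ".cpp", each clang-formatted.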
c1508d51a90db1ebf3c0278c777ff3169e0d13f9
|
tests/unit/test_wrapper.py
|
tests/unit/test_wrapper.py
|
import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
|
import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
def test_echo_1d_array_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(np.array([1]))
assert ret == 1
|
Add test for 1d array arguments for Wrapper
|
Add test for 1d array arguments for Wrapper
|
Python
|
bsd-3-clause
|
lintusj1/elfi,elfi-dev/elfi,lintusj1/elfi,HIIT/elfi,elfi-dev/elfi
|
import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
Add test for 1d array arguments for Wrapper
|
import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
def test_echo_1d_array_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(np.array([1]))
assert ret == 1
|
<commit_before>import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
<commit_msg>Add test for 1d array arguments for Wrapper<commit_after>
|
import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
def test_echo_1d_array_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(np.array([1]))
assert ret == 1
|
import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
Add test for 1d array arguments for Wrapper
import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
def test_echo_1d_array_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(np.array([1]))
assert ret == 1
|
<commit_before>import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
<commit_msg>Add test for 1d array arguments for Wrapper<commit_after>import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
def test_echo_1d_array_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(np.array([1]))
assert ret == 1
|
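Context for the new 1-d array test: substituting a numpy array straight into a command string produces bracketed text, which is why a wrapper has to flatten scalar-like arrays first (how elfi's Wrapper does this internally is not shown here):
import numpy as np
a = np.array([1])
assert "echo {0}".format(a) == "echo [1]"  # brackets would reach the shell
assert str(a.item()) == "1"                # one way to flatten a 1-element array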
90655c89fcf56af06a69f8110a9f7154294ca11c
|
ritter/analytics/sentiment_analyzer.py
|
ritter/analytics/sentiment_analyzer.py
|
import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
|
import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
|
Update to Sentimental 2.2.x with undersampling
|
feat: Update to Sentimental 2.2.x with undersampling
|
Python
|
mit
|
ErikGartner/ghostdoc-ritter
|
import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
feat: Update to Sentimental 2.2.x with undersampling
|
import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
|
<commit_before>import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
<commit_msg>feat: Update to Sentimental 2.2.x with undersampling<commit_after>
|
import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
|
import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
feat: Update to Sentimental 2.2.x with undersampling
import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
|
<commit_before>import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
<commit_msg>feat: Update to Sentimental 2.2.x with undersampling<commit_after>import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
|
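What an undersample=True flag typically means: every class is cut down to the size of the smallest one so the majority label cannot dominate training. A hedged sketch of the idea only — Sentimental 2.2.x's actual implementation may differ:
import random
def undersample(samples_by_label, seed=0):
    rng = random.Random(seed)
    n = min(len(v) for v in samples_by_label.values())
    return {label: rng.sample(v, n) for label, v in samples_by_label.items()}
data = {"positive": ["bra", "fint", "toppen"], "negative": ["uselt"]}
balanced = undersample(data)
assert all(len(v) == 1 for v in balanced.values())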
93be15b7f74673247eeabc208fd56cc6cb735e43
|
tests/matchers/test_contain.py
|
tests/matchers/test_contain.py
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestAbove(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain({1, 2, 3}, 1).matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain({1, 2, 3}, 4).matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestAbove(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
Remove sets from tests
Since python 2.6 does not have literal set syntax
|
Remove sets from tests
Since python 2.6 does not have literal set syntax
|
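The 2.6-compatible spelling the commit alludes to: set literals like {1, 2, 3} arrived in Python 2.7, but the constructor form works everywhere:
s = set([1, 2, 3])        # equivalent to {1, 2, 3}, valid on Python 2.6
assert 2 in s and 4 not in s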
Python
|
mit
|
vesln/robber.py,taoenator/robber.py
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestAbove(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain({1, 2, 3}, 1).matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain({1, 2, 3}, 4).matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
Remove sets from tests
Since python 2.6 does not have literal set syntax
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestAbove(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
<commit_before>import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain({1, 2, 3}, 1).matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain({1, 2, 3}, 4).matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
<commit_msg>Remove sets from tests
Since python 2.6 does not have literal set syntax<commit_after>
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain({1, 2, 3}, 1).matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain({1, 2, 3}, 4).matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
Remove sets from tests
Since python 2.6 does not have literal set syntaximport unittest
from robber import expect
from robber.matchers.contain import Contain
class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
<commit_before>import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain({1, 2, 3}, 1).matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain({1, 2, 3}, 4).matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
<commit_msg>Remove sets from tests
Since python 2.6 does not have literal set syntax<commit_after>import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
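Note: the commit above drops the set cases entirely; if set coverage were still wanted on Python 2.6, the set() constructor form (which predates literal syntax) would work. A sketch, not part of the commit:

from robber import expect
from robber.matchers.contain import Contain

expect(Contain(set([1, 2, 3]), 1).matches()) == True   # set() call is valid on Python 2.6
expect(Contain(set([1, 2, 3]), 4).matches()) == False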
6ee083f5b5a190f30f4916698c57c7ee1c2225fe
|
create_sample.py
|
create_sample.py
|
# importing modules/ libraries
import pandas as pd
import random
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv',
index = False)
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv',
index = False)
# create sample of orders data
n = 3421083
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_sample_df = pd.read_csv('Data/orders.csv',
skiprows = skip)
order_sample_df.to_csv('Data/orders_sample.csv',
index = False)
|
# importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
index_col = 'order_id', skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv')
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
index_col = 'order_id', skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv')
# create sample of orders data
prior_unique_ids = np.array(order_products__prior_sample_df.index.unique())
train_unique_ids = np.array(order_products__train_sample_df.index.unique())
match_ids = np.concatenate((prior_unique_ids, train_unique_ids), axis = 0)
order_sample_df = pd.read_csv('Data/orders.csv', index_col = 'order_id')
order_sample_df = order_sample_df.loc[match_ids,:]
order_sample_df.to_csv('Data/orders_sample.csv')
|
Change create sample code to ensure matching order ids data
|
fix: Change create sample code to ensure matching order ids data
|
Python
|
mit
|
rjegankumar/instacart_prediction_model
|
# importing modules/ libraries
import pandas as pd
import random
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv',
index = False)
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv',
index = False)
# create sample of orders data
n = 3421083
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_sample_df = pd.read_csv('Data/orders.csv',
skiprows = skip)
order_sample_df.to_csv('Data/orders_sample.csv',
index = False)fix: Change create sample code to ensure matching order ids data
|
# importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
index_col = 'order_id', skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv')
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
index_col = 'order_id', skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv')
# create sample of orders data
prior_unique_ids = np.array(order_products__prior_sample_df.index.unique())
train_unique_ids = np.array(order_products__train_sample_df.index.unique())
match_ids = np.concatenate((prior_unique_ids, train_unique_ids), axis = 0)
order_sample_df = pd.read_csv('Data/orders.csv', index_col = 'order_id')
order_sample_df = order_sample_df.loc[match_ids,:]
order_sample_df.to_csv('Data/orders_sample.csv')
|
<commit_before># importing modules/ libraries
import pandas as pd
import random
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv',
index = False)
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv',
index = False)
# create sample of orders data
n = 3421083
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_sample_df = pd.read_csv('Data/orders.csv',
skiprows = skip)
order_sample_df.to_csv('Data/orders_sample.csv',
index = False)<commit_msg>fix: Change create sample code to ensure matching order ids data<commit_after>
|
# importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
index_col = 'order_id', skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv')
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
index_col = 'order_id', skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv')
# create sample of orders data
prior_unique_ids = np.array(order_products__prior_sample_df.index.unique())
train_unique_ids = np.array(order_products__train_sample_df.index.unique())
match_ids = np.concatenate((prior_unique_ids, train_unique_ids), axis = 0)
order_sample_df = pd.read_csv('Data/orders.csv', index_col = 'order_id')
order_sample_df = order_sample_df.loc[match_ids,:]
order_sample_df.to_csv('Data/orders_sample.csv')
|
# importing modules/ libraries
import pandas as pd
import random
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv',
index = False)
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv',
index = False)
# create sample of orders data
n = 3421083
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_sample_df = pd.read_csv('Data/orders.csv',
skiprows = skip)
order_sample_df.to_csv('Data/orders_sample.csv',
index = False)fix: Change create sample code to ensure matching order ids data# importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
index_col = 'order_id', skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv')
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
index_col = 'order_id', skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv')
# create sample of orders data
prior_unique_ids = np.array(order_products__prior_sample_df.index.unique())
train_unique_ids = np.array(order_products__train_sample_df.index.unique())
match_ids = np.concatenate((prior_unique_ids, train_unique_ids), axis = 0)
order_sample_df = pd.read_csv('Data/orders.csv', index_col = 'order_id')
order_sample_df = order_sample_df.loc[match_ids,:]
order_sample_df.to_csv('Data/orders_sample.csv')
|
<commit_before># importing modules/ libraries
import pandas as pd
import random
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv',
index = False)
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv',
index = False)
# create sample of orders data
n = 3421083
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_sample_df = pd.read_csv('Data/orders.csv',
skiprows = skip)
order_sample_df.to_csv('Data/orders_sample.csv',
index = False)<commit_msg>fix: Change create sample code to ensure matching order ids data<commit_after># importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
index_col = 'order_id', skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv')
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
index_col = 'order_id', skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv')
# create sample of orders data
prior_unique_ids = np.array(order_products__prior_sample_df.index.unique())
train_unique_ids = np.array(order_products__train_sample_df.index.unique())
match_ids = np.concatenate((prior_unique_ids, train_unique_ids), axis = 0)
order_sample_df = pd.read_csv('Data/orders.csv', index_col = 'order_id')
order_sample_df = order_sample_df.loc[match_ids,:]
order_sample_df.to_csv('Data/orders_sample.csv')
|
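Note: pandas read_csv takes index_col rather than index (corrected above, matching the orders.csv call later in the same script), and the concatenated id arrays can contain duplicates, which would duplicate rows in the .loc lookup. np.unique is one way to de-duplicate; a minimal sketch with made-up ids:

import numpy as np

prior_ids = np.array([10, 11, 12])
train_ids = np.array([11, 13])
match_ids = np.unique(np.concatenate((prior_ids, train_ids))) # drops the duplicate 11
print(match_ids) # [10 11 12 13]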
acc3888ef55d7df22df08b16cc746186fc1a75c7
|
main.py
|
main.py
|
#!/usr/bin/env python3
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3.6
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
|
Use uvloop because apparently it's fast.
|
Use uvloop because apparently it's fast.
|
Python
|
mit
|
PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI
|
#!/usr/bin/env python3
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
Use uvloop because apparently it's fast.
|
#!/usr/bin/env python3.6
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
<commit_msg>Use uvloop because apparently it's fast.<commit_after>
|
#!/usr/bin/env python3.6
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
Use uvloop because apparently it's fast.#!/usr/bin/env python3.6
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
<commit_msg>Use uvloop because apparently it's fast.<commit_after>#!/usr/bin/env python3.6
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
|
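Note: catching only ImportError (as in the corrected code above) keeps uvloop an optional dependency without a bare except hiding unrelated failures. A self-contained sketch of the pattern:

import asyncio

try:
    import uvloop  # optional speedup; program runs fine without it
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
    pass  # uvloop not installed: fall back to the stdlib event loop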
3fb56e434182e5b28dcad0c547b0326ebe5be352
|
main.py
|
main.py
|
from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
|
from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionToFile(collectionFileName, arguments):
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
writeCollectionToFile(collectionFileName, arguments)
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
|
Refactor create action into function
|
Refactor create action into function
|
Python
|
apache-2.0
|
AmosGarner/PyInventory
|
from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
Refactor create action into function
|
from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionToFile(collectionFileName, arguments):
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
writeCollectionToFile(collectionFileName, arguments)
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
|
<commit_before>from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
<commit_msg>Refactor create action into function<commit_after>
|
from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionToFile(collectionFileName, arguments):
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
writeCollectionToFile(collectionFileName, arguments)
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
|
from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
Refactor create action into functionfrom createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionToFile(collectionFileName, arguments):
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
writeCollectionToFile(collectionFileName, arguments)
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
|
<commit_before>from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
<commit_msg>Refactor create action into function<commit_after>from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionToFile(collectionFileName, arguments):
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
writeCollectionToFile(collectionFileName, arguments)
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
|
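Note: a possible follow-up to the refactor above is a with-block, so the file is closed even if toJSON() raises. A sketch reusing the record's own helper names:

def writeCollectionToFile(collectionFileName, arguments):
    collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
    with open(collectionFileName, 'w') as collectionFile:  # closed automatically, even on error
        collectionFile.write(collection.toJSON())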
f34f67247d97d75039c65f71da8489fbffa17575
|
snowpenguin/django/recaptcha2/tests.py
|
snowpenguin/django/recaptcha2/tests.py
|
import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def tearDown(self):
del os.environ['RECAPTCHA_DISABLE']
|
import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def test_dummy_error(self):
del os.environ['RECAPTCHA_DISABLE']
form = RecaptchaTestForm({})
self.assertFalse(form.is_valid())
def tearDown(self):
os.environ.pop('RECAPTCHA_DISABLE', None)
|
Check possible exception with wrong key data
|
Check possible exception with wrong key data
|
Python
|
lgpl-2.1
|
kbytesys/django-recaptcha2,kbytesys/django-recaptcha2
|
import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def tearDown(self):
del os.environ['RECAPTCHA_DISABLE']
Check possible exception with wrong key data
|
import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def test_dummy_error(self):
del os.environ['RECAPTCHA_DISABLE']
form = RecaptchaTestForm({})
self.assertFalse(form.is_valid())
def tearDown(self):
os.environ.pop('RECAPTCHA_DISABLE', None)
|
<commit_before>import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def tearDown(self):
del os.environ['RECAPTCHA_DISABLE']
<commit_msg>Check possible exception with wrong key data<commit_after>
|
import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def test_dummy_error(self):
del os.environ['RECAPTCHA_DISABLE']
form = RecaptchaTestForm({})
self.assertFalse(form.is_valid())
def tearDown(self):
os.environ.pop('RECAPTCHA_DISABLE', None)
|
import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def tearDown(self):
del os.environ['RECAPTCHA_DISABLE']
Check possible exception with wrong key dataimport os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def test_dummy_error(self):
del os.environ['RECAPTCHA_DISABLE']
form = RecaptchaTestForm({})
self.assertFalse(form.is_valid())
def tearDown(self):
os.environ.pop('RECAPTCHA_DISABLE', None)
|
<commit_before>import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def tearDown(self):
del os.environ['RECAPTCHA_DISABLE']
<commit_msg>Check possible exception with wrong key data<commit_after>import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def test_dummy_error(self):
del os.environ['RECAPTCHA_DISABLE']
form = RecaptchaTestForm({})
self.assertFalse(form.is_valid())
def tearDown(self):
os.environ.pop('RECAPTCHA_DISABLE', None)
|
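Note: because test_dummy_error deletes RECAPTCHA_DISABLE itself, a plain del in tearDown would raise KeyError after that test (setUp/tearDown run around each test). os.environ.pop with a default, used in the corrected tearDown above, tolerates the variable already being gone:

import os

os.environ['RECAPTCHA_DISABLE'] = 'True'
os.environ.pop('RECAPTCHA_DISABLE', None)  # removes the variable
os.environ.pop('RECAPTCHA_DISABLE', None)  # already gone: no KeyError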
969aed7046e4965962e8ed5daa9c557baffc48bc
|
glue_h5part/io.py
|
glue_h5part/io.py
|
import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
data[attribute] = group[attribute].value
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
|
import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
try:
data[attribute] = group[attribute].value
except AttributeError:
pass
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
|
Fix issue with HDF5 objects that don't have a value
|
Fix issue with HDF5 objects that don't have a value
|
Python
|
bsd-2-clause
|
glue-viz/glue-h5part
|
import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
data[attribute] = group[attribute].value
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
Fix issue with HDF5 objects that don't have a value
|
import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
try:
data[attribute] = group[attribute].value
except AttributeError:
pass
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
|
<commit_before>import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
data[attribute] = group[attribute].value
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
<commit_msg>Fix issue with HDF5 objects that don't have a value <commit_after>
|
import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
try:
data[attribute] = group[attribute].value
except AttributeError:
pass
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
|
import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
data[attribute] = group[attribute].value
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
Fix issue with HDF5 objects that don't have a value import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
try:
data[attribute] = group[attribute].value
except AttributeError:
pass
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
|
<commit_before>import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
data[attribute] = group[attribute].value
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
<commit_msg>Fix issue with HDF5 objects that don't have a value <commit_after>import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
try:
data[attribute] = group[attribute].value
except AttributeError:
pass
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
|
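The try/except AttributeError in the fixed read_step_to_data works because h5py sub-groups have no .value attribute while datasets do. A sketch of the same skip expressed as an explicit type check; note that .value is the older h5py accessor and ds[()] the current spelling, which is an assumption about h5py versions, not part of the commit:

import h5py

def read_group_arrays(group):
    # Copy out only real datasets; nested groups are skipped by type
    # instead of by a caught AttributeError.
    arrays = {}
    for name, node in group.items():
        if isinstance(node, h5py.Dataset):
            arrays[name] = node[()]
    return arrays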
a9c1cc5517f2e32c812cf041359a64fea9af9bad
|
main.py
|
main.py
|
#!/usr/bin/python3
from ANN import ANN
from random import seed as srand, randint
from time import time
srand(time())
# Test data for a XOR gate
testData = [
[0.1, 0.1, 0.9],
[0.1, 0.9, 0.9],
[0.9, 0.1, 0.9],
[0.9, 0.9, 0.1]
]
# Create ANN with 2 input neurons, 1 hidden layer with 3 neurons,
# 1 output neuron, and a learning rate of 10.0
net = ANN([2, 3, 1], 3.0)
# Train network
for i in range(10000):
#testRow = testData[i % len(testData)]
testRow = testData[randint(0, len(testData)-1)]
net.feedforward(testRow[:-1])
# Calculate and display error squared
print("err: " + str(net.errSqr(testRow[-1:])))
net.backpropagate(testRow[-1:])
accuracy = 0.0
for testRow in testData:
net.feedforward(testRow[:-1])
accuracy += net.errSqr(testRow[-1:])
matching = (
(testRow[-1] >= 0.45 and net.out[-1] >= 0.45) or
(testRow[-1] < 0.45 and net.out[-1] < 0.45)
)
print(str(testRow[0]) +
"\t" + str(testRow[1]) +
"\t:\t" +
str(net.out[0]) +
"\t" +
("GOOD" if matching else "BAD")
)
accuracy /= len(testData)
print("Aggregate accuracy: " + str(accuracy))
|
#!/usr/bin/python3
"""
Uses a genetic algorithm to determine the optimal number of layers,
neurons per each layer, learning rate and training iterations for an ANN given
a set of training data.
When running this script via a command line, it can take one optional argument
for the name of a file to stream output into in place of stdout.
"""
import logging
import simulate
import sys
# Evaluate command line arguments.
if len(sys.argv) > 1:
try:
output = open(sys.argv[1], 'w')
except IOError:
output = sys.stdout
output.write("Error: can't open {} for writing")
output.write("Output will be pushed to stdout")
else:
simulate.setOutput(output)
else:
output = sys.stdout
logging.basicConfig(stream=output, level=logging.DEBUG)
try:
simulate.simulate()
except:
logging.exception("Got exception on main handler")
raise
|
Add description and user-end functionality
|
Add description and user-end functionality
Main.py takes one optional parameter: a filepath for a log file.
If left empty, all output goes into stdout.
|
Python
|
mit
|
JoshuaBrockschmidt/ideal_ANN
|
#!/usr/bin/python3
from ANN import ANN
from random import seed as srand, randint
from time import time
srand(time())
# Test data for a XOR gate
testData = [
[0.1, 0.1, 0.9],
[0.1, 0.9, 0.9],
[0.9, 0.1, 0.9],
[0.9, 0.9, 0.1]
]
# Create ANN with 2 input neurons, 1 hidden layer with 3 neurons,
# 1 output neuron, and a learning rate of 10.0
net = ANN([2, 3, 1], 3.0)
# Train network
for i in range(10000):
#testRow = testData[i % len(testData)]
testRow = testData[randint(0, len(testData)-1)]
net.feedforward(testRow[:-1])
# Calculate and display error squared
print("err: " + str(net.errSqr(testRow[-1:])))
net.backpropagate(testRow[-1:])
accuracy = 0.0
for testRow in testData:
net.feedforward(testRow[:-1])
accuracy += net.errSqr(testRow[-1:])
matching = (
(testRow[-1] >= 0.45 and net.out[-1] >= 0.45) or
(testRow[-1] < 0.45 and net.out[-1] < 0.45)
)
print(str(testRow[0]) +
"\t" + str(testRow[1]) +
"\t:\t" +
str(net.out[0]) +
"\t" +
("GOOD" if matching else "BAD")
)
accuracy /= len(testData)
print("Aggregate accuracy: " + str(accuracy))
Add description and user-end functionality
Main.py takes one optional parameter: a filepath for a log file.
If left empty, all output goes into stdout.
|
#!/usr/bin/python3
"""
Uses a genetic algorithm to determine the optimal number of layers,
neurons per each layer, learning rate and training iterations for an ANN given
a set of training data.
When running this script via a command line, it can take one optional argument
for the name of a file to stream output into in place of stdout.
"""
import logging
import simulate
import sys
# Evaluate command line arguments.
if len(sys.argv) > 1:
try:
output = open(sys.argv[1], 'w')
except IOError:
output = sys.stdout
output.write("Error: can't open {} for writing")
output.write("Output will be pushed to stdout")
else:
simulate.setOutput(output)
else:
output = sys.stdout
logging.basicConfig(stream=output, level=logging.DEBUG)
try:
simulate.simulate()
except:
logging.exception("Got exception on main handler")
raise
|
<commit_before>#!/usr/bin/python3
from ANN import ANN
from random import seed as srand, randint
from time import time
srand(time())
# Test data for a XOR gate
testData = [
[0.1, 0.1, 0.9],
[0.1, 0.9, 0.9],
[0.9, 0.1, 0.9],
[0.9, 0.9, 0.1]
]
# Create ANN with 2 input neurons, 1 hidden layer with 3 neurons,
# 1 output neuron, and a learning rate of 10.0
net = ANN([2, 3, 1], 3.0)
# Train network
for i in range(10000):
#testRow = testData[i % len(testData)]
testRow = testData[randint(0, len(testData)-1)]
net.feedforward(testRow[:-1])
# Calculate and display error squared
print("err: " + str(net.errSqr(testRow[-1:])))
net.backpropagate(testRow[-1:])
accuracy = 0.0
for testRow in testData:
net.feedforward(testRow[:-1])
accuracy += net.errSqr(testRow[-1:])
matching = (
(testRow[-1] >= 0.45 and net.out[-1] >= 0.45) or
(testRow[-1] < 0.45 and net.out[-1] < 0.45)
)
print(str(testRow[0]) +
"\t" + str(testRow[1]) +
"\t:\t" +
str(net.out[0]) +
"\t" +
("GOOD" if matching else "BAD")
)
accuracy /= len(testData)
print("Aggregate accuracy: " + str(accuracy))
<commit_msg>Add description and user-end functionality
Main.py takes one optional parameter: a filepath for a log file.
If left empty, all output goes into stdout.<commit_after>
|
#!/usr/bin/python3
"""
Uses a genetic algorithm to determine the optimal number of layers,
neurons per each layer, learning rate and training iterations for an ANN given
a set of training data.
When running this script via a command line, it can take one optional argument
for the name of a file to stream output into in place of stdout.
"""
import logging
import simulate
import sys
# Evaluate command line arguments.
if len(sys.argv) > 1:
try:
output = open(sys.argv[1], 'w')
except IOError:
output = sys.stdout
output.write("Error: can't open {} for writing")
output.write("Output will be pushed to stdout")
else:
simulate.setOutput(output)
else:
output = sys.stdout
logging.basicConfig(stream=output, level=logging.DEBUG)
try:
simulate.simulate()
except:
logging.exception("Got exception on main handler")
raise
|
#!/usr/bin/python3
from ANN import ANN
from random import seed as srand, randint
from time import time
srand(time())
# Test data for a XOR gate
testData = [
[0.1, 0.1, 0.9],
[0.1, 0.9, 0.9],
[0.9, 0.1, 0.9],
[0.9, 0.9, 0.1]
]
# Create ANN with 2 input neurons, 1 hidden layer with 3 neurons,
# 1 output neuron, and a learning rate of 10.0
net = ANN([2, 3, 1], 3.0)
# Train network
for i in range(10000):
#testRow = testData[i % len(testData)]
testRow = testData[randint(0, len(testData)-1)]
net.feedforward(testRow[:-1])
# Calculate and display error squared
print("err: " + str(net.errSqr(testRow[-1:])))
net.backpropagate(testRow[-1:])
accuracy = 0.0
for testRow in testData:
net.feedforward(testRow[:-1])
accuracy += net.errSqr(testRow[-1:])
matching = (
(testRow[-1] >= 0.45 and net.out[-1] >= 0.45) or
(testRow[-1] < 0.45 and net.out[-1] < 0.45)
)
print(str(testRow[0]) +
"\t" + str(testRow[1]) +
"\t:\t" +
str(net.out[0]) +
"\t" +
("GOOD" if matching else "BAD")
)
accuracy /= len(testData)
print("Aggregate accuracy: " + str(accuracy))
Add description and user-end functionality
Main.py takes one optional parameter: a filepath for a log file.
If left empty, all output goes into stdout.#!/usr/bin/python3
"""
Uses a genetic algorithm to determine the optimal number of layers,
neurons per each layer, learning rate and training iterations for an ANN given
a set of training data.
When running this script via a command line, it can take one optional argument
for the name of a file to stream output into in place of stdout.
"""
import logging
import simulate
import sys
# Evaluate command line arguments.
if len(sys.argv) > 1:
try:
output = open(sys.argv[1], 'w')
except IOError:
output = sys.stdout
output.write("Error: can't open {} for writing")
output.write("Output will be pushed to stdout")
else:
simulate.setOutput(output)
else:
output = sys.stdout
logging.basicConfig(stream=output, level=logging.DEBUG)
try:
simulate.simulate()
except:
logging.exception("Got exception on main handler")
raise
|
<commit_before>#!/usr/bin/python3
from ANN import ANN
from random import seed as srand, randint
from time import time
srand(time())
# Test data for a XOR gate
testData = [
[0.1, 0.1, 0.9],
[0.1, 0.9, 0.9],
[0.9, 0.1, 0.9],
[0.9, 0.9, 0.1]
]
# Create ANN with 2 input neurons, 1 hidden layer with 3 neurons,
# 1 output neuron, and a learning rate of 10.0
net = ANN([2, 3, 1], 3.0)
# Train network
for i in range(10000):
#testRow = testData[i % len(testData)]
testRow = testData[randint(0, len(testData)-1)]
net.feedforward(testRow[:-1])
# Calculate and display error squared
print("err: " + str(net.errSqr(testRow[-1:])))
net.backpropagate(testRow[-1:])
accuracy = 0.0
for testRow in testData:
net.feedforward(testRow[:-1])
accuracy += net.errSqr(testRow[-1:])
matching = (
(testRow[-1] >= 0.45 and net.out[-1] >= 0.45) or
(testRow[-1] < 0.45 and net.out[-1] < 0.45)
)
print(str(testRow[0]) +
"\t" + str(testRow[1]) +
"\t:\t" +
str(net.out[0]) +
"\t" +
("GOOD" if matching else "BAD")
)
accuracy /= len(testData)
print("Aggregate accuracy: " + str(accuracy))
<commit_msg>Add description and user-end functionality
Main.py takes one optional parameter: a filepath for a log file.
If left empty, all output goes into stdout.<commit_after>#!/usr/bin/python3
"""
Uses a genetic algorithm to determine the optimal number of layers,
neurons per each layer, learning rate and training iterations for an ANN given
a set of training data.
When running this script via a command line, it can take one optional argument
for the name of a file to stream output into in place of stdout.
"""
import logging
import simulate
import sys
# Evaluate command line arguments.
if len(sys.argv) > 1:
try:
output = open(sys.argv[1], 'w')
except IOError:
output = sys.stdout
output.write("Error: can't open {} for writing")
output.write("Output will be pushed to stdout")
else:
simulate.setOutput(output)
else:
output = sys.stdout
logging.basicConfig(stream=output, level=logging.DEBUG)
try:
simulate.simulate()
except:
logging.exception("Got exception on main handler")
raise
|
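One detail of the new main.py worth a second look: the IOError branch writes the literal text "Error: can't open {} for writing" without a .format() call, so the brace placeholder reaches the user verbatim. A hedged sketch of what the branch presumably intends, with simulate assumed to be the project-local module from the commit:

import sys
import simulate  # project-local module, as in the commit

try:
    output = open(sys.argv[1], 'w')
except IOError:
    output = sys.stdout
    # Name the file that failed instead of printing a bare '{}'.
    output.write("Error: can't open {} for writing\n".format(sys.argv[1]))
    output.write("Output will be pushed to stdout\n")
else:
    simulate.setOutput(output)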
adee7a2530d22d1242f89cddc84795efd1d02653
|
imagesift/cms_plugins.py
|
imagesift/cms_plugins.py
|
import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
if limit:
qs = qs[:limit]
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = list(qs)
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)
|
import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
# there's no way around listing, sorry.
qs = list(qs)
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
# sort before limit
qs.sort(key=lambda i: i.overrideable_date())
if limit:
qs = qs[:limit]
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)
|
Sort returned images by date, taking into account overrides
|
Sort returned images by date, taking into account overrides
|
Python
|
bsd-3-clause
|
topiaruss/cmsplugin-imagesift,topiaruss/cmsplugin-imagesift,topiaruss/cmsplugin-imagesift
|
import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
if limit:
qs = qs[:limit]
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = list(qs)
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)Sort returned images by date, taking into account overrides
|
import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
# there's no way around listing, sorry.
qs = list(qs)
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
# sort before limit
qs.sort(key=lambda i: i.overrideable_date())
if limit:
qs = qs[:limit]
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)
|
<commit_before>import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
if limit:
qs = qs[:limit]
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = list(qs)
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)<commit_msg>Sort returned images by date, taking into account overrides<commit_after>
|
import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
# there's no way around listing, sorry.
qs = list(qs)
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
# sort before limit
qs.sort(key=lambda i: i.overrideable_date())
if limit:
qs = qs[:limit]
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)
|
import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
if limit:
qs = qs[:limit]
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = list(qs)
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)Sort returned images by date, taking into account overridesimport datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
# there's no way around listing, sorry.
qs = list(qs)
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
# sort before limit
qs.sort(key=lambda i: i.overrideable_date())
if limit:
qs = qs[:limit]
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)
|
<commit_before>import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
if limit:
qs = qs[:limit]
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = list(qs)
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)<commit_msg>Sort returned images by date, taking into account overrides<commit_after>import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
# there's no way around listing, sorry.
qs = list(qs)
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
# sort before limit
qs.sort(key=lambda i: i.overrideable_date())
if limit:
qs = qs[:limit]
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin)
|
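The reordering above matters because slicing a queryset applies the limit in database order; by materialising the list, sorting on the override-aware date, and only then slicing, the plugin limits the right images. (The unicode() call in the filter message also marks this as Python 2 code.) A minimal sketch of the sort-then-slice pattern, with overrideable_date assumed to return a datetime as in the plugin:

def date_ordered_window(images, limit=None):
    # Sort on the override-aware date first, then apply the display limit.
    ordered = sorted(images, key=lambda img: img.overrideable_date())
    return ordered[:limit] if limit else ordered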
8f14126e36e7f5c15431cd7541762e485c3f8169
|
main.py
|
main.py
|
from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime
import json
def main():
#createCollection('agarner','books')
now = datetime.datetime.now()
items = []
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
itemCollection = Collection('Items', 'agarner', items)
print itemCollection.toJSON()
if __name__ == '__main__':
main()
|
from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path
CONST_COLLECTIONS_NAME = 'collections'
CONST_USERNAME = 'agarner'
CONST_COLLECTION = 'Items'
def generateItemsCollection():
items = []
now = datetime.datetime.now()
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
return Collection(CONST_COLLECTION, CONST_USERNAME, items)
def main():
createCollection(CONST_USERNAME,CONST_COLLECTION)
itemCollection = generateItemsCollection()
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTION+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
|
Implement ability to save json data to collection file
|
Implement ability to save json data to collection file
|
Python
|
apache-2.0
|
AmosGarner/PyInventory
|
from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime
import json
def main():
#createCollection('agarner','books')
now = datetime.datetime.now()
items = []
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
itemCollection = Collection('Items', 'agarner', items)
print itemCollection.toJSON()
if __name__ == '__main__':
main()
Implement ability to save json data to collection file
|
from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path
CONST_COLLECTIONS_NAME = 'collections'
CONST_USERNAME = 'agarner'
CONST_COLLECTION = 'Items'
def generateItemsCollection():
items = []
now = datetime.datetime.now()
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
return Collection(CONST_COLLECTION, CONST_USERNAME, items)
def main():
createCollection(CONST_USERNAME,CONST_COLLECTION)
itemCollection = generateItemsCollection()
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTION+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
|
<commit_before>from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime
import json
def main():
#createCollection('agarner','books')
now = datetime.datetime.now()
items = []
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
itemCollection = Collection('Items', 'agarner', items)
print itemCollection.toJSON()
if __name__ == '__main__':
main()
<commit_msg>Implement ability to save json data to collection file<commit_after>
|
from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path
CONST_COLLECTIONS_NAME = 'collections'
CONST_USERNAME = 'agarner'
CONST_COLLECTION = 'Items'
def generateItemsCollection():
items = []
now = datetime.datetime.now()
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
return Collection(CONST_COLLECTION, CONST_USERNAME, items)
def main():
createCollection(CONST_USERNAME,CONST_COLLECTION)
itemCollection = generateItemsCollection()
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTION+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
|
from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime
import json
def main():
#createCollection('agarner','books')
now = datetime.datetime.now()
items = []
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
itemCollection = Collection('Items', 'agarner', items)
print itemCollection.toJSON()
if __name__ == '__main__':
main()
Implement ability to save json data to collection filefrom createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path
CONST_COLLECTIONS_NAME = 'collections'
CONST_USERNAME = 'agarner'
CONST_COLLECTION = 'Items'
def generateItemsCollection():
items = []
now = datetime.datetime.now()
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
return Collection(CONST_COLLECTION, CONST_USERNAME, items)
def main():
createCollection(CONST_USERNAME,CONST_COLLECTION)
itemCollection = generateItemsCollection()
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTION+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
|
<commit_before>from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime
import json
def main():
#createCollection('agarner','books')
now = datetime.datetime.now()
items = []
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
itemCollection = Collection('Items', 'agarner', items)
print itemCollection.toJSON()
if __name__ == '__main__':
main()
<commit_msg>Implement ability to save json data to collection file<commit_after>from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path
CONST_COLLECTIONS_NAME = 'collections'
CONST_USERNAME = 'agarner'
CONST_COLLECTION = 'Items'
def generateItemsCollection():
items = []
now = datetime.datetime.now()
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
return Collection(CONST_COLLECTION, CONST_USERNAME, items)
def main():
createCollection(CONST_USERNAME,CONST_COLLECTION)
itemCollection = generateItemsCollection()
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTION+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
|
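As committed, the collection is written only when the .dat file already exists, since the write is gated on os.path.isfile; createCollection is presumably responsible for creating it first. A sketch that builds the same collections/<user>_collections/<user>_<name>_collection.dat path with os.path.join — the helper names are illustrative, not from the commit:

import os

def collection_file_path(base, username, collection):
    # e.g. collections/agarner_collections/agarner_Items_collection.dat
    return os.path.join(base,
                        '{0}_{1}'.format(username, base),
                        '{0}_{1}_collection.dat'.format(username, collection))

def save_collection(item_collection, path):
    with open(path, 'w') as collection_file:
        collection_file.write(item_collection.toJSON())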
d63509e0d68a1dceabbbcf58432a92f7a4cbfd77
|
robot/robot/src/autonomous/main.py
|
robot/robot/src/autonomous/main.py
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self):
super().__init__()
print("Team 1418 robot code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self, drive, intake, catapult):
super().__init__()
print("Team 1418 autonomous code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()
def on_enable(self):
time = wpilib.Timer()
timer.Start()
update (self, timer)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
self.Compressor.Start()
self.intake.armDown()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
self.drive.move(self,0,-1,0)
self.catapult.launch()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
if self.robot.ball_sensor!=.4:
self.intake.wheels()
self.drive.move(self,0,1,0)
elif self.robot.ball_sensor==.4:
self.drive.move(self,0,-1,0)
self.catapult.launch()
'''Do not implement your own loop for autonomous mode. Instead,
assume that
this function is called over and over and over again during
autonomous
mode if this mode is active
time_elapsed is a number that tells you how many seconds
autonomous mode has
been running so far.
'''
|
Bring the autonomous mode back
|
Bring the autonomous mode back
|
Python
|
bsd-3-clause
|
frc1418/2014
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self):
super().__init__()
print("Team 1418 robot code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()Bring the autonomous mode back
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self, drive, intake, catapult):
super().__init__()
print("Team 1418 autonomous code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()
def on_enable(self):
time = wpilib.Timer()
timer.Start()
update (self, timer)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
self.Compressor.Start()
self.intake.armDown()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
self.drive.move(self,0,-1,0)
self.catapult.launch()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
if self.robot.ball_sensor!=.4:
self.intake.wheels()
self.drive.move(self,0,1,0)
elif self.robot.ball_sensor==.4:
self.drive.move(self,0,-1,0)
self.catapult.launch()
'''Do not implement your own loop for autonomous mode. Instead,
assume that
this function is called over and over and over again during
autonomous
mode if this mode is active
time_elapsed is a number that tells you how many seconds
autonomous mode has
been running so far.
'''
|
<commit_before>
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self):
super().__init__()
print("Team 1418 robot code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()<commit_msg>Bring the autonomous mode back<commit_after>
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self, drive, intake, catapult):
super().__init__()
print("Team 1418 autonomous code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()
def on_enable(self):
time = wpilib.Timer()
timer.Start()
update (self, timer)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
self.Compressor.Start()
self.intake.armDown()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
self.drive.move(self,0,-1,0)
self.catapult.launch()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
if self.robot.ball_sensor!=.4:
self.intake.wheels()
self.drive.move(self,0,1,0)
elif self.robot.ball_sensor==.4:
self.drive.move(self,0,-1,0)
self.catapult.launch()
'''Do not implement your own loop for autonomous mode. Instead,
assume that
this function is called over and over and over again during
autonomous
mode if this mode is active
time_elapsed is a number that tells you how many seconds
autonomous mode has
been running so far.
'''
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self):
super().__init__()
print("Team 1418 robot code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()Bring the autonomous mode back
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self, drive, intake, catapult):
super().__init__()
print("Team 1418 autonomous code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()
def on_enable(self):
time = wpilib.Timer()
timer.Start()
update (self, timer)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
self.Compressor.Start()
self.intake.armDown()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
self.drive.move(self,0,-1,0)
self.catapult.launch()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
if self.robot.ball_sensor!=.4:
self.intake.wheels()
self.drive.move(self,0,1,0)
elif self.robot.ball_sensor==.4:
self.drive.move(self,0,-1,0)
self.catapult.launch()
'''Do not implement your own loop for autonomous mode. Instead,
assume that
this function is called over and over and over again during
autonomous
mode if this mode is active
time_elapsed is a number that tells you how many seconds
autonomous mode has
been running so far.
'''
|
<commit_before>
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self):
super().__init__()
print("Team 1418 robot code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()<commit_msg>Bring the autonomous mode back<commit_after>
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self, drive, intake, catapult):
super().__init__()
print("Team 1418 autonomous code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()
def on_enable(self):
time = wpilib.Timer()
timer.Start()
update (self, timer)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
self.Compressor.Start()
self.intake.armDown()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
self.drive.move(self,0,-1,0)
self.catapult.launch()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
if self.robot.ball_sensor!=.4:
self.intake.wheels()
self.drive.move(self,0,1,0)
elif self.robot.ball_sensor==.4:
self.drive.move(self,0,-1,0)
self.catapult.launch()
'''Do not implement your own loop for autonomous mode. Instead,
assume that
this function is called over and over and over again during
autonomous
mode if this mode is active
time_elapsed is a number that tells you how many seconds
autonomous mode has
been running so far.
'''
|
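The restored on_enable above binds its timer to the name time but then calls timer.Start() and a bare update(self, timer), so it would raise NameError as written. A hedged sketch of the presumable intent, keeping the capitalised wpilib Timer methods used elsewhere in the commit:

def on_enable(self):
    # Start a match timer and pass the elapsed seconds to update().
    self.timer = wpilib.Timer()
    self.timer.Start()
    self.update(self.timer.Get())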
ba4ea2169a13d61d30c94e89db512a34bc0fe3b5
|
bluesky/tests/test_documents.py
|
bluesky/tests/test_documents.py
|
from bluesky.run_engine import RunEngine
from bluesky.tests.utils import setup_test_run_engine
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
|
import pytest
import jsonschema
from bluesky.run_engine import RunEngine
from event_model import DocumentNames, schemas
from bluesky.tests.utils import setup_test_run_engine
from bluesky.utils import new_uid
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
def test_dots_not_allowed_in_keys():
doc = {'time': 0,
'uid': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.start])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.start])
# Now add illegal key.
doc.update({'b.': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.start])
doc = {'time': 0,
'uid': new_uid(),
'data_keys': {'a': {'source': '',
'dtype': 'number',
'shape': []}},
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Now add illegal key.
doc.update({'b.c': 'd'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
doc = {'time': 0,
'uid': new_uid(),
'exit_status': 'success',
'reason': '',
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Now add illegal key.
doc.update({'.b': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.stop])
|
Test that event_model forbids dots in key names.
|
TST: Test that event_model forbids dots in key names.
|
Python
|
bsd-3-clause
|
ericdill/bluesky,ericdill/bluesky
|
from bluesky.run_engine import RunEngine
from bluesky.tests.utils import setup_test_run_engine
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
TST: Test that event_model forbids dots in key names.
|
import pytest
import jsonschema
from bluesky.run_engine import RunEngine
from event_model import DocumentNames, schemas
from bluesky.tests.utils import setup_test_run_engine
from bluesky.utils import new_uid
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
def test_dots_not_allowed_in_keys():
doc = {'time': 0,
'uid': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.start])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.start])
# Now add illegal key.
doc.update({'b.': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.start])
doc = {'time': 0,
'uid': new_uid(),
'data_keys': {'a': {'source': '',
'dtype': 'number',
'shape': []}},
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Now add illegal key.
doc.update({'b.c': 'd'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
doc = {'time': 0,
'uid': new_uid(),
'exit_status': 'success',
'reason': '',
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Now add illegal key.
doc.update({'.b': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.stop])
|
<commit_before>from bluesky.run_engine import RunEngine
from bluesky.tests.utils import setup_test_run_engine
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
<commit_msg>TST: Test that event_model forbids dots in key names.<commit_after>
|
import pytest
import jsonschema
from bluesky.run_engine import RunEngine
from event_model import DocumentNames, schemas
from bluesky.tests.utils import setup_test_run_engine
from bluesky.utils import new_uid
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
def test_dots_not_allowed_in_keys():
doc = {'time': 0,
'uid': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.start])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.start])
# Now add illegal key.
doc.update({'b.': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.start])
doc = {'time': 0,
'uid': new_uid(),
'data_keys': {'a': {'source': '',
'dtype': 'number',
'shape': []}},
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Now add illegal key.
doc.update({'b.c': 'd'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
doc = {'time': 0,
'uid': new_uid(),
'exit_status': 'success',
'reason': '',
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Now add illegal key.
doc.update({'.b': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.stop])
|
from bluesky.run_engine import RunEngine
from bluesky.tests.utils import setup_test_run_engine
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
TST: Test that event_model forbids dots in key names.import pytest
import jsonschema
from bluesky.run_engine import RunEngine
from event_model import DocumentNames, schemas
from bluesky.tests.utils import setup_test_run_engine
from bluesky.utils import new_uid
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
def test_dots_not_allowed_in_keys():
doc = {'time': 0,
'uid': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.start])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.start])
# Now add illegal key.
doc.update({'b.': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.start])
doc = {'time': 0,
'uid': new_uid(),
'data_keys': {'a': {'source': '',
'dtype': 'number',
'shape': []}},
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Now add illegal key.
doc.update({'b.c': 'd'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
doc = {'time': 0,
'uid': new_uid(),
'exit_status': 'success',
'reason': '',
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Now add illegal key.
doc.update({'.b': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.stop])
|
<commit_before>from bluesky.run_engine import RunEngine
from bluesky.tests.utils import setup_test_run_engine
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
<commit_msg>TST: Test that event_model forbids dots in key names.<commit_after>import pytest
import jsonschema
from bluesky.run_engine import RunEngine
from event_model import DocumentNames, schemas
from bluesky.tests.utils import setup_test_run_engine
from bluesky.utils import new_uid
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
def test_dots_not_allowed_in_keys():
doc = {'time': 0,
'uid': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.start])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.start])
# Now add illegal key.
doc.update({'b.': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.start])
doc = {'time': 0,
'uid': new_uid(),
'data_keys': {'a': {'source': '',
'dtype': 'number',
'shape': []}},
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Now add illegal key.
doc.update({'b.c': 'd'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
doc = {'time': 0,
'uid': new_uid(),
'exit_status': 'success',
'reason': '',
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Now add illegal key.
doc.update({'.b': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.stop])
|
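Since the three blocks of test_dots_not_allowed_in_keys differ only in the base document and the dotted key, the same checks collapse naturally under pytest.mark.parametrize. A compact sketch, with base documents and schema names copied from the test; the descriptor case would parametrise the same way:

import pytest
import jsonschema
from event_model import DocumentNames, schemas
from bluesky.utils import new_uid

@pytest.mark.parametrize('name, doc, bad_key', [
    (DocumentNames.start,
     {'time': 0, 'uid': new_uid()}, 'b.'),
    (DocumentNames.stop,
     {'time': 0, 'uid': new_uid(), 'exit_status': 'success',
      'reason': '', 'run_start': new_uid()}, '.b'),
])
def test_dotted_keys_rejected(name, doc, bad_key):
    jsonschema.validate(doc, schemas[name])  # base document is legal
    doc[bad_key] = 'c'                       # a dotted key is not
    with pytest.raises(jsonschema.ValidationError):
        jsonschema.validate(doc, schemas[name])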
aca1b138350434c9afb08f31164269cd58de1d2d
|
YouKnowShit/CheckFile.py
|
YouKnowShit/CheckFile.py
|
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
|
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file)
|
Add a level of upper directory
|
Add a level of upper directory
|
Python
|
mit
|
jiangtianyu2009/PiSoftCake
|
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
    print(file)
Add a level of upper directory
|
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file)
|
<commit_before>import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)<commit_msg>Add a level of uper directory<commit_after>
|
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file)
|
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
    print(file)
Add a level of upper directory
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file)
|
<commit_before>import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)<commit_msg>Add a level of uper directory<commit_after>import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file)
|
1f527bd99a35cf6396e6300369719b3f5f5490ff
|
app/main/forms.py
|
app/main/forms.py
|
from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
|
from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
from .. import data_api_client
class AdminEmailAddressValidator(object):
def __init__(self, message=None):
self.message = message
def __call__(self, form, field):
if not data_api_client.email_is_valid_for_admin_user(field.data):
raise validators.StopValidation(self.message)
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
|
Add new validator that applies data_api_client.email_is_valid_for_admin_user to field
|
Add new validator that applies data_api_client.email_is_valid_for_admin_user to field
|
Python
|
mit
|
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
|
from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
Add new validator that applies data_api_client.email_is_valid_for_admin_user to field
|
from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
from .. import data_api_client
class AdminEmailAddressValidator(object):
def __init__(self, message=None):
self.message = message
def __call__(self, form, field):
if not data_api_client.email_is_valid_for_admin_user(field.data):
raise validators.StopValidation(self.message)
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
|
<commit_before>from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
<commit_msg>Add new validator that applies data_api_client.email_is_valid_for_admin_user to field<commit_after>
|
from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
from .. import data_api_client
class AdminEmailAddressValidator(object):
def __init__(self, message=None):
self.message = message
def __call__(self, form, field):
if not data_api_client.email_is_valid_for_admin_user(field.data):
raise validators.StopValidation(self.message)
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
|
from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
Add new validator that applies data_api_client.email_is_valid_for_admin_user to field
from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
from .. import data_api_client
class AdminEmailAddressValidator(object):
def __init__(self, message=None):
self.message = message
def __call__(self, form, field):
if not data_api_client.email_is_valid_for_admin_user(field.data):
raise validators.StopValidation(self.message)
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
|
<commit_before>from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
<commit_msg>Add new validator that applies data_api_client.email_is_valid_for_admin_user to field<commit_after>from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
from .. import data_api_client
class AdminEmailAddressValidator(object):
def __init__(self, message=None):
self.message = message
def __call__(self, form, field):
if not data_api_client.email_is_valid_for_admin_user(field.data):
raise validators.StopValidation(self.message)
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
|
62b7b01fe9a1d87692e97a6a75b52d542f8a43be
|
scrapi/processing/elastic_search.py
|
scrapi/processing/elastic_search.py
|
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
logger = logging.getLogger(__name__)
logger.warn(es.count(index='share'))
|
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
normalized['dateUpdated'] = self.version_dateUpdated(normalized)
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
def version_dateUpdated(self, normalized):
old_doc = es.get_source(
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
ignore=[404]
)
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
|
Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream
|
Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream
|
Python
|
apache-2.0
|
fabianvf/scrapi,felliott/scrapi,fabianvf/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,mehanig/scrapi,felliott/scrapi,ostwald/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,jeffreyliu3230/scrapi,alexgarciac/scrapi
|
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
logger = logging.getLogger(__name__)
logger.warn(es.count(index='share'))
Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream
|
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
normalized['dateUpdated'] = self.version_dateUpdated(normalized)
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
def version_dateUpdated(self, normalized):
old_doc = es.get_source(
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
ignore=[404]
)
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
|
<commit_before>import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
logger = logging.getLogger(__name__)
logger.warn(es.count(index='share'))
<commit_msg>Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream<commit_after>
|
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
normalized['dateUpdated'] = self.version_dateUpdated(normalized)
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
def version_dateUpdated(self, normalized):
old_doc = es.get_source(
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
ignore=[404]
)
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
|
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
logger = logging.getLogger(__name__)
logger.warn(es.count(index='share'))
Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
normalized['dateUpdated'] = self.version_dateUpdated(normalized)
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
def version_dateUpdated(self, normalized):
old_doc = es.get_source(
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
ignore=[404]
)
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
|
<commit_before>import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
logger = logging.getLogger(__name__)
logger.warn(es.count(index='share'))
<commit_msg>Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream<commit_after>import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
normalized['dateUpdated'] = self.version_dateUpdated(normalized)
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
def version_dateUpdated(self, normalized):
old_doc = es.get_source(
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
ignore=[404]
)
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
|
0460404bb7f3e9a9f6ece1c4a141b16fced6f741
|
tests/test_chunked_http.py
|
tests/test_chunked_http.py
|
from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
Use the disco.compat.http_server to work with python3.
|
Use the disco.compat.http_server to work with python3.
|
Python
|
bsd-3-clause
|
pombredanne/disco,simudream/disco,ErikDubbelboer/disco,beni55/disco,discoproject/disco,ErikDubbelboer/disco,oldmantaiter/disco,simudream/disco,oldmantaiter/disco,seabirdzh/disco,seabirdzh/disco,seabirdzh/disco,discoproject/disco,ktkt2009/disco,discoproject/disco,ktkt2009/disco,beni55/disco,ErikDubbelboer/disco,pombredanne/disco,mozilla/disco,mozilla/disco,discoproject/disco,beni55/disco,mwilliams3/disco,mwilliams3/disco,pombredanne/disco,simudream/disco,pombredanne/disco,seabirdzh/disco,ErikDubbelboer/disco,ktkt2009/disco,beni55/disco,ErikDubbelboer/disco,ktkt2009/disco,seabirdzh/disco,beni55/disco,pooya/disco,mozilla/disco,mozilla/disco,pooya/disco,ktkt2009/disco,oldmantaiter/disco,discoproject/disco,pooya/disco,simudream/disco,oldmantaiter/disco,pombredanne/disco,oldmantaiter/disco,mwilliams3/disco,mwilliams3/disco,simudream/disco,pooya/disco,mwilliams3/disco
|
from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
Use the disco.compat.http_server to work with python3.
|
from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
<commit_before>from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
<commit_msg>Use the disco.compat.http_server to work with python3.<commit_after>
|
from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
Use the disco.compat.http_server to work with python3.
from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
<commit_before>from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
<commit_msg>Use the disco.compat.http_server to work with python3.<commit_after>from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
dd5b8c55e601709f1f04cb0ec7dbde63b84801d8
|
snippet_parser/fr.py
|
snippet_parser/fr.py
|
#-*- encoding: utf-8 -*-
import base
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = template.params[0]
if len(template.params) == 2:
ret += template.params[1]
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
Implement a couple of other French templates.
|
Implement a couple of other French templates.
Still need to add tests for these.
Former-commit-id: 4021d27a7bd15a396b637beb57c10fc95936cb3f
|
Python
|
mit
|
eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,eggpi/citationhunt
|
#-*- encoding: utf-8 -*-
import base
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
Implement a couple of other French templates.
Still need to add tests for these.
Former-commit-id: 4021d27a7bd15a396b637beb57c10fc95936cb3f
|
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = template.params[0]
if len(template.params) == 2:
ret += template.params[1]
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
<commit_before>#-*- encoding: utf-8 -*-
import base
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
<commit_msg>Implement a couple of other French templates.
Still need to add tests for these.
Former-commit-id: 4021d27a7bd15a396b637beb57c10fc95936cb3f<commit_after>
|
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = template.params[0]
if len(template.params) == 2:
ret += template.params[1]
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
#-*- encoding: utf-8 -*-
import base
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
Implement a couple of other French templates.
Still need to add tests for these.
Former-commit-id: 4021d27a7bd15a396b637beb57c10fc95936cb3f
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = template.params[0]
if len(template.params) == 2:
ret += template.params[1]
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
<commit_before>#-*- encoding: utf-8 -*-
import base
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
<commit_msg>Implement a couple of other French templates.
Still need to add tests for these.
Former-commit-id: 4021d27a7bd15a396b637beb57c10fc95936cb3f<commit_after>#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = template.params[0]
if len(template.params) == 2:
ret += template.params[1]
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
878c14e04327f2f9d2d4acd22de21ed23b0cfb9a
|
skan/test/test_vendored_correlate.py
|
skan/test/test_vendored_correlate.py
|
from time import time
import numpy as np
from skan.vendored import thresholding as th
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
|
from time import time
from functools import reduce
import numpy as np
from skan.vendored import thresholding as th
from skimage.transform import integral_image
from scipy import ndimage as ndi
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
def test_reference_correlation():
ndim = 4
shape = np.random.randint(0, 20, size=ndim)
x = np.random.random(shape)
kern = reduce(np.outer, [[-1, 0, 0, 1]] * ndim).reshape((4,) * ndim)
px = np.pad(x, (2, 1), mode='reflect')
pxi = integral_image(px)
mean_fast = th.correlate_nonzeros(pxi, kern / 3 ** ndim)
mean_ref = ndi.correlate(x, np.ones((3,) * ndim) / 3 ** ndim,
mode='mirror')
np.testing.assert_allclose(mean_fast, mean_ref)
|
Add test for new fast correlation
|
Add test for new fast correlation
|
Python
|
bsd-3-clause
|
jni/skan
|
from time import time
import numpy as np
from skan.vendored import thresholding as th
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
Add test for new fast correlation
|
from time import time
from functools import reduce
import numpy as np
from skan.vendored import thresholding as th
from skimage.transform import integral_image
from scipy import ndimage as ndi
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
def test_reference_correlation():
ndim = 4
shape = np.random.randint(0, 20, size=ndim)
x = np.random.random(shape)
kern = reduce(np.outer, [[-1, 0, 0, 1]] * ndim).reshape((4,) * ndim)
px = np.pad(x, (2, 1), mode='reflect')
pxi = integral_image(px)
mean_fast = th.correlate_nonzeros(pxi, kern / 3 ** ndim)
mean_ref = ndi.correlate(x, np.ones((3,) * ndim) / 3 ** ndim,
mode='mirror')
np.testing.assert_allclose(mean_fast, mean_ref)
|
<commit_before>from time import time
import numpy as np
from skan.vendored import thresholding as th
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
<commit_msg>Add test for new fast correlation<commit_after>
|
from time import time
from functools import reduce
import numpy as np
from skan.vendored import thresholding as th
from skimage.transform import integral_image
from scipy import ndimage as ndi
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
def test_reference_correlation():
ndim = 4
shape = np.random.randint(0, 20, size=ndim)
x = np.random.random(shape)
kern = reduce(np.outer, [[-1, 0, 0, 1]] * ndim).reshape((4,) * ndim)
px = np.pad(x, (2, 1), mode='reflect')
pxi = integral_image(px)
mean_fast = th.correlate_nonzeros(pxi, kern / 3 ** ndim)
mean_ref = ndi.correlate(x, np.ones((3,) * ndim) / 3 ** ndim,
mode='mirror')
np.testing.assert_allclose(mean_fast, mean_ref)
|
from time import time
import numpy as np
from skan.vendored import thresholding as th
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
Add test for new fast correlation
from time import time
from functools import reduce
import numpy as np
from skan.vendored import thresholding as th
from skimage.transform import integral_image
from scipy import ndimage as ndi
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
def test_reference_correlation():
ndim = 4
shape = np.random.randint(0, 20, size=ndim)
x = np.random.random(shape)
kern = reduce(np.outer, [[-1, 0, 0, 1]] * ndim).reshape((4,) * ndim)
px = np.pad(x, (2, 1), mode='reflect')
pxi = integral_image(px)
mean_fast = th.correlate_nonzeros(pxi, kern / 3 ** ndim)
mean_ref = ndi.correlate(x, np.ones((3,) * ndim) / 3 ** ndim,
mode='mirror')
np.testing.assert_allclose(mean_fast, mean_ref)
|
<commit_before>from time import time
import numpy as np
from skan.vendored import thresholding as th
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
<commit_msg>Add test for new fast correlation<commit_after>from time import time
from functools import reduce
import numpy as np
from skan.vendored import thresholding as th
from skimage.transform import integral_image
from scipy import ndimage as ndi
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
def test_reference_correlation():
ndim = 4
shape = np.random.randint(0, 20, size=ndim)
x = np.random.random(shape)
kern = reduce(np.outer, [[-1, 0, 0, 1]] * ndim).reshape((4,) * ndim)
px = np.pad(x, (2, 1), mode='reflect')
pxi = integral_image(px)
mean_fast = th.correlate_nonzeros(pxi, kern / 3 ** ndim)
mean_ref = ndi.correlate(x, np.ones((3,) * ndim) / 3 ** ndim,
mode='mirror')
np.testing.assert_allclose(mean_fast, mean_ref)
|
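A hedged aside on the record above: the new test exercises integral-image-based correlation, where any window sum is recovered from a handful of integral-image samples instead of a full convolution. The sketch below is our own minimal 2-D illustration of that identity, not skan's correlate_nonzeros (which generalizes it to n-D kernels); the zero padding and names are assumptions for illustration.

import numpy as np

def local_sum_via_integral_image(x, w):
    """Sum over every w-by-w window of x via an integral image."""
    # One leading row/column of zeros so each window has a valid corner.
    p = np.pad(x, ((1, 0), (1, 0)))
    ii = p.cumsum(axis=0).cumsum(axis=1)  # inclusive 2-D prefix sums
    h, wd = x.shape
    out = np.empty((h - w + 1, wd - w + 1))
    for i in range(out.shape[0]):
        for j in range(out.shape[1]):
            # Four-corner identity: window sum = D - B - C + A
            out[i, j] = (ii[i + w, j + w] - ii[i, j + w]
                         - ii[i + w, j] + ii[i, j])
    return out

x = np.random.random((8, 8))
ref = np.array([[x[i:i + 3, j:j + 3].sum() for j in range(6)]
                for i in range(6)])
np.testing.assert_allclose(local_sum_via_integral_image(x, 3), ref)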
425a06b852b5abfbbd46cd82065daff9b8bf9f51
|
ehriportal/devel_settings.py
|
ehriportal/devel_settings.py
|
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
|
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
|
Use terminal for dev email output
|
Use terminal for dev email output
|
Python
|
mit
|
mikesname/ehri-collections,mikesname/ehri-collections,mikesname/ehri-collections
|
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
Use terminal for dev email output
|
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
|
<commit_before>
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
<commit_msg>Use terminal for dev email output<commit_after>
|
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
|
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
Use terminal for dev email output
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
|
<commit_before>
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
<commit_msg>Use terminal for dev email output<commit_after>
import sys
ADMINS = (
("Mike", "michael.bryant@kcl.ac.uk"),
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
if "test" in sys.argv:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
|
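Aside: the one-line change above swaps SMTP for Django's console backend, which renders each message to stdout. A self-contained way to see that behaviour outside the project (the settings and addresses here are illustrative, not the repo's):

import django
from django.conf import settings

settings.configure(
    EMAIL_BACKEND='django.core.mail.backends.console.EmailBackend')
django.setup()

from django.core.mail import send_mail

# Headers and body are printed to the terminal instead of being sent.
send_mail('Test subject', 'Hello from dev settings.',
          'dev@example.com', ['someone@example.com'])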
98ba687e67c8d5a17560bed59f42dbe8e3fb0cf6
|
amaascore/books/enums.py
|
amaascore/books/enums.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
|
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
|
Python
|
apache-2.0
|
amaas-fintech/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python,amaas-fintech/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
<commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
<commit_msg>Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.<commit_after>
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
<commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
<commit_msg>Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.<commit_after>from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
|
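Aside: enum sets like this usually back a membership check, so a hypothetical guard (ours, not the SDK's) shows what removing 'Individual' changes in practice:

BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}

def validate_book_type(book_type):
    # Reject anything outside the allowed set.
    if book_type not in BOOK_TYPES:
        raise ValueError('%s is not a valid book_type' % book_type)
    return book_type

validate_book_type('Trading')      # passes
# validate_book_type('Individual') # would now raise ValueError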
4650b6730d925c4a5fde34ec4c2f9058763ab58b
|
cupcake/smush.py
|
cupcake/smush.py
|
"""
User-facing interface to all dimensionality reduction algorithms
"""
def smushplot(data, smusher):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher, n_components=2, marker='o', marker_order=None,
text=False, text_order=None, linewidth=1, linewidth_order=None,
edgecolor='k', edgecolor_order=None, smusher_kws=None,
plot_kws=None):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
Add a bunch of plotting and keyword arguments
|
Add a bunch of plotting and keyword arguments
|
Python
|
bsd-3-clause
|
olgabot/cupcake
|
"""
User-facing interface to all dimensionality reduction algorithms
"""
def smushplot(data, smusher):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
Add a bunch of plotting and keyword arguments
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher, n_components=2, marker='o', marker_order=None,
text=False, text_order=None, linewidth=1, linewidth_order=None,
edgecolor='k', edgecolor_order=None, smusher_kws=None,
plot_kws=None):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
<commit_before>"""
User-facing interface to all dimensionality reduction algorithms
"""
def smushplot(data, smusher):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
<commit_msg>Add a bunch of plotting and keyword arguments<commit_after>
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher, n_components=2, marker='o', marker_order=None,
text=False, text_order=None, linewidth=1, linewidth_order=None,
edgecolor='k', edgecolor_order=None, smusher_kws=None,
plot_kws=None):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
"""
User-facing interface to all dimensionality reduction algorithms
"""
def smushplot(data, smusher):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
Add a bunch of plotting and keyword arguments"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher, n_components=2, marker='o', marker_order=None,
text=False, text_order=None, linewidth=1, linewidth_order=None,
edgecolor='k', edgecolor_order=None, smusher_kws=None,
plot_kws=None):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
<commit_before>"""
User-facing interface to all dimensionality reduction algorithms
"""
def smushplot(data, smusher):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
<commit_msg>Add a bunch of plotting and keyword arguments<commit_after>"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher, n_components=2, marker='o', marker_order=None,
text=False, text_order=None, linewidth=1, linewidth_order=None,
edgecolor='k', edgecolor_order=None, smusher_kws=None,
plot_kws=None):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
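Aside: the string branch left as ``pass`` above amounts to a name-to-class lookup. A hedged sketch of one way to resolve it; the lookup table and defaults are assumptions, not cupcake's actual logic:

import numpy as np
from sklearn.decomposition import PCA, FastICA
from sklearn.manifold import TSNE, MDS

_SMUSHERS = {'PCA': PCA, 'ICA': FastICA, 'TSNE': TSNE, 'MDS': MDS}

def resolve_smusher(smusher, n_components=2, smusher_kws=None):
    smusher_kws = {} if smusher_kws is None else smusher_kws
    if isinstance(smusher, str):
        return _SMUSHERS[smusher.upper()](n_components=n_components,
                                          **smusher_kws)
    return smusher  # assume an initialized object with fit_transform()

data = np.random.rand(50, 10)
reduced = resolve_smusher('PCA').fit_transform(data)
print(reduced.shape)  # (50, 2)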
fc7c08aecf9d247e54db70ae14c999902d6f6bfa
|
workflow/migrations/0024_auto_20180620_0537.py
|
workflow/migrations/0024_auto_20180620_0537.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
Fix the dashboard migration for UUID
|
Fix the dashboard migration for UUID
|
Python
|
apache-2.0
|
toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
Fix the dashboard migration for UUID
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
<commit_msg>Fix the dashboard migration for UUID<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
Fix the dashboard migration for UUID# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
<commit_msg>Fix the dashboard migration for UUID<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
|
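Aside on the fix above: UUIDField(default=None) is only valid together with null=True, which is exactly what the corrected migration adds. The other common shape is a callable default for newly created rows (uniquely backfilling existing rows still takes a separate data migration). A standalone sketch of both field forms, with names that are ours:

import uuid
from django.db import models

# Nullable, matching the corrected migration above:
nullable_uuid = models.UUIDField(blank=True, null=True, default=None)
# Alternative: callable default applied to newly created rows:
generated_uuid = models.UUIDField(default=uuid.uuid4, editable=False)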
9c6ad90f20354ca47a2fc56cc0d7ff6ebfc613d3
|
weather/weather-display.py
|
weather/weather-display.py
|
#!/usr/bin/env python
from subprocess import call
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_image():
call('wget -O "%s" "%s"' % (OUTPUT_FILE, URL), shell=True)
def main():
clear_screen()
get_image()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from subprocess import call
from datetime import datetime
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_dashboard(url, output_file):
call('rm %s' % output_file, shell=True)
call('wget -O "%s" "%s"' % (output_file, url), shell=True)
def set_dashboard_background(image_path):
call('eips -g %s' % image_path, shell=True)
def main():
now = datetime.now()
    # Only execute from 05:00 through 11:59:
if (now.hour >= 5) and (now.hour <= 11):
clear_screen()
get_dashboard(URL, OUTPUT_FILE)
set_dashboard_background(OUTPUT_FILE)
if __name__ == "__main__":
main()
|
Update weather display to be run during specified intervals.
|
Update weather display to be run during specified intervals.
|
Python
|
bsd-3-clause
|
alfredo/microdash,alfredo/microdash
|
#!/usr/bin/env python
from subprocess import call
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_image():
call('wget -O "%s" "%s"' % (OUTPUT_FILE, URL), shell=True)
def main():
clear_screen()
get_image()
if __name__ == "__main__":
main()
Update weather display to be run during specified intervals.
|
#!/usr/bin/env python
from subprocess import call
from datetime import datetime
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_dashboard(url, output_file):
call('rm %s' % output_file, shell=True)
call('wget -O "%s" "%s"' % (output_file, url), shell=True)
def set_dashboard_background(image_path):
call('eips -g %s' % image_path, shell=True)
def main():
now = datetime.now()
    # Only execute from 05:00 through 11:59:
if (now.hour >= 5) and (now.hour <= 11):
clear_screen()
get_dashboard(URL, OUTPUT_FILE)
set_dashboard_background(OUTPUT_FILE)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from subprocess import call
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_image():
call('wget -O "%s" "%s"' % (OUTPUT_FILE, URL), shell=True)
def main():
clear_screen()
get_image()
if __name__ == "__main__":
main()
<commit_msg>Update weather display to be run during specified intervals.<commit_after>
|
#!/usr/bin/env python
from subprocess import call
from datetime import datetime
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_dashboard(url, output_file):
call('rm %s' % output_file, shell=True)
call('wget -O "%s" "%s"' % (output_file, url), shell=True)
def set_dashboard_background(image_path):
call('eips -g %s' % image_path, shell=True)
def main():
now = datetime.now()
    # Only execute from 05:00 through 11:59:
if (now.hour >= 5) and (now.hour <= 11):
clear_screen()
get_dashboard(URL, OUTPUT_FILE)
set_dashboard_background(OUTPUT_FILE)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from subprocess import call
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_image():
call('wget -O "%s" "%s"' % (OUTPUT_FILE, URL), shell=True)
def main():
clear_screen()
get_image()
if __name__ == "__main__":
main()
Update weather display to be run during specified intervals.#!/usr/bin/env python
from subprocess import call
from datetime import datetime
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_dashboard(url, output_file):
call('rm %s' % output_file, shell=True)
call('wget -O "%s" "%s"' % (output_file, url), shell=True)
def set_dashboard_background(image_path):
call('eips -g %s' % image_path, shell=True)
def main():
now = datetime.now()
    # Only execute from 05:00 through 11:59:
if (now.hour >= 5) and (now.hour <= 11):
clear_screen()
get_dashboard(URL, OUTPUT_FILE)
set_dashboard_background(OUTPUT_FILE)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from subprocess import call
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_image():
call('wget -O "%s" "%s"' % (OUTPUT_FILE, URL), shell=True)
def main():
clear_screen()
get_image()
if __name__ == "__main__":
main()
<commit_msg>Update weather display to be run during specified intervals.<commit_after>#!/usr/bin/env python
from subprocess import call
from datetime import datetime
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_dashboard(url, output_file):
call('rm %s' % output_file, shell=True)
call('wget -O "%s" "%s"' % (output_file, url), shell=True)
def set_dashboard_background(image_path):
call('eips -g %s' % image_path, shell=True)
def main():
now = datetime.now()
    # Only execute from 05:00 through 11:59:
if (now.hour >= 5) and (now.hour <= 11):
clear_screen()
get_dashboard(URL, OUTPUT_FILE)
set_dashboard_background(OUTPUT_FILE)
if __name__ == "__main__":
main()
|
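Aside: gating a cron-driven script by wall-clock hour, as above, becomes easy to unit-test once the window is pulled out as data. A tiny refactor sketch (names are ours, not the script's):

from datetime import datetime

ACTIVE_HOURS = range(5, 12)   # 05:00-11:59, matching the check above

def should_refresh(now=None):
    now = now or datetime.now()
    return now.hour in ACTIVE_HOURS

assert should_refresh(datetime(2024, 1, 1, 5))
assert not should_refresh(datetime(2024, 1, 1, 12))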
33ee2cdad20aab11ffdc76c4bd2bd5a82295e798
|
scripts/first_level_admin_clusters.py
|
scripts/first_level_admin_clusters.py
|
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
|
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
# FIXME this should be a real count of enumeration areas
# (EA/EA_NAME) per state (see #592) e.g.:
# { ABIA: { standard: 32, reserve: 4 } }
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
|
Add EA count bug note
|
Add EA count bug note
|
Python
|
agpl-3.0
|
eHealthAfrica/nutsurv,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,johanneswilm/eha-nutsurv-django
|
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
Add EA count bug note
|
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
# FIXME this should be a real count of enumeration areas
# (EA/EA_NAME) per state (see #592) e.g.:
# { ABIA: { standard: 32, reserve: 4 } }
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
|
<commit_before>import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
<commit_msg>Add EA count bug note<commit_after>
|
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
# FIXME this should be a real count of enumeration areas
# (EA/EA_NAME) per state (see #592) e.g.:
# { ABIA: { standard: 32, reserve: 4 } }
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
|
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
Add EA count bug noteimport csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
# FIXME this should be a real count of enumeration areas
# (EA/EA_NAME) per state (see #592) e.g.:
# { ABIA: { standard: 32, reserve: 4 } }
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
|
<commit_before>import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
<commit_msg>Add EA count bug note<commit_after>import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
next(reader)
for row in reader:
clusterfile[row[0].upper()] = {
# FIXME this should be a real count of enumeration areas
# (EA/EA_NAME) per state (see #592) e.g.:
# { ABIA: { standard: 32, reserve: 4 } }
"reserve": 5,
"standard": 10
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
|
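Aside: a hedged sketch of what the FIXME above asks for, counting enumeration areas per state from the CSV instead of hard-coding 10/5. The column position and the one-third reserve split are assumptions for illustration, and the sample data is fabricated for the demo:

import csv
import io
import json
from collections import Counter

SAMPLE = "STATE,EA_NAME\nABIA,ea1\nABIA,ea2\nABIA,ea3\nKANO,ea4\n"

counts = Counter()
reader = csv.reader(io.StringIO(SAMPLE))
next(reader)                      # skip header
for row in reader:
    counts[row[0].upper()] += 1

clusterfile = {state: {"standard": n - n // 3, "reserve": n // 3}
               for state, n in counts.items()}
print(json.dumps(clusterfile, indent=2, sort_keys=True))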
a6785b6ec546ba31f52420fcf55f4f45b00926ca
|
info.py
|
info.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s@127.0.0.1' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
import string
import random
class Info:
def __init__(self):
self.debug = False
def _remoteShellName(self):
tmp = ''.join(random.choice(string.ascii_letters) for i in xrange(20))
return 'ctl-%s@127.0.0.1' % tmp
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = self._remoteShellName()
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
Use just a random string instead of uuid in shell name.
|
Use just a random string instead of uuid in shell name.
python 2.4 lacks uuid module.
Change-Id: I5a1d5339741af2f4defa67d17f557639cd30bb91
Reviewed-on: http://review.couchbase.org/12462
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Tested-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>
Reviewed-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>
|
Python
|
apache-2.0
|
membase/membase-cli,couchbaselabs/couchbase-cli,couchbaselabs/couchbase-cli,couchbase/couchbase-cli,membase/membase-cli,couchbase/couchbase-cli,membase/membase-cli,couchbaselabs/couchbase-cli
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s@127.0.0.1' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
Use just a random string instead of uuid in shell name.
python 2.4 lacks uuid module.
Change-Id: I5a1d5339741af2f4defa67d17f557639cd30bb91
Reviewed-on: http://review.couchbase.org/12462
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Tested-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>
Reviewed-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
import string
import random
class Info:
def __init__(self):
self.debug = False
def _remoteShellName(self):
tmp = ''.join(random.choice(string.ascii_letters) for i in xrange(20))
return 'ctl-%s@127.0.0.1' % tmp
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = self._remoteShellName()
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s@127.0.0.1' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
<commit_msg>Use just a random string instead of uuid in shell name.
python 2.4 lacks uuid module.
Change-Id: I5a1d5339741af2f4defa67d17f557639cd30bb91
Reviewed-on: http://review.couchbase.org/12462
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Tested-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>
Reviewed-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com><commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
import string
import random
class Info:
def __init__(self):
self.debug = False
def _remoteShellName(self):
tmp = ''.join(random.choice(string.ascii_letters) for i in xrange(20))
return 'ctl-%s@127.0.0.1' % tmp
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = self._remoteShellName()
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s@127.0.0.1' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
Use just a random string instead of uuid in shell name.
python 2.4 lacks uuid module.
Change-Id: I5a1d5339741af2f4defa67d17f557639cd30bb91
Reviewed-on: http://review.couchbase.org/12462
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Tested-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>
Reviewed-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
import string
import random
class Info:
def __init__(self):
self.debug = False
def _remoteShellName(self):
tmp = ''.join(random.choice(string.ascii_letters) for i in xrange(20))
return 'ctl-%s@127.0.0.1' % tmp
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = self._remoteShellName()
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s@127.0.0.1' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
<commit_msg>Use just a random string instead of uuid in shell name.
python 2.4 lacks uuid module.
Change-Id: I5a1d5339741af2f4defa67d17f557639cd30bb91
Reviewed-on: http://review.couchbase.org/12462
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Tested-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com>
Reviewed-by: Farshid Ghods <e312e45b3dfe8923eeb42f1475810bf7c59ba895@gmail.com><commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
import string
import random
class Info:
def __init__(self):
self.debug = False
def _remoteShellName(self):
tmp = ''.join(random.choice(string.ascii_letters) for i in xrange(20))
return 'ctl-%s@127.0.0.1' % tmp
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = self._remoteShellName()
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
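Aside: the replacement above exists because Python 2.4 ships no uuid module, so a run of random ASCII letters stands in for a UUID in the Erlang node name. The same idea in portable form (range instead of the 2.x-only xrange used above):

import random
import string

def remote_shell_name(length=20):
    tmp = ''.join(random.choice(string.ascii_letters)
                  for _ in range(length))
    return 'ctl-%s@127.0.0.1' % tmp

print(remote_shell_name())  # e.g. ctl-kQzPwX...@127.0.0.1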
c1189bf7c24068fda9871436a705b70fd016dfd5
|
examples/json_editor.py
|
examples/json_editor.py
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
self.action_open = QtWidgets.QAction('open file', self)
self.action_open.setShortcut('Ctrl+O')
self.action_open.triggered.connect(self.open_file)
self.addAction(self.action_open)
def open_file(self):
filename, _ = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open JSON file')
if filename:
self.editor.file.open(filename)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
Add missing open action to the example so that you can open other files
|
Add missing open action to the example so that you can open other files
(useful for testing and evaluating)
|
Python
|
mit
|
pyQode/pyqode.json,pyQode/pyqode.json
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
Add missing open action to the example so that you can open other files
(useful for testing and evaluating)
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
self.action_open = QtWidgets.QAction('open file', self)
self.action_open.setShortcut('Ctrl+O')
self.action_open.triggered.connect(self.open_file)
self.addAction(self.action_open)
def open_file(self):
filename, _ = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open JSON file')
if filename:
self.editor.file.open(filename)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
<commit_before>"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
<commit_msg>Add missing open action to the example so that you can open other files
(useful for testing and evaluating)<commit_after>
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
self.action_open = QtWidgets.QAction('open file', self)
self.action_open.setShortcut('Ctrl+O')
self.action_open.triggered.connect(self.open_file)
self.addAction(self.action_open)
def open_file(self):
filename, _ = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open JSON file')
if filename:
self.editor.file.open(filename)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
Add missing open action to the example so that you can open other files
(useful for testing and evaluating)"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
self.action_open = QtWidgets.QAction('open file', self)
self.action_open.setShortcut('Ctrl+O')
self.action_open.triggered.connect(self.open_file)
self.addAction(self.action_open)
def open_file(self):
filename, _ = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open JSON file')
if filename:
self.editor.file.open(filename)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
<commit_before>"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
<commit_msg>Add missing open action to the example so that you can open other files
(useful for testing and evaluating)<commit_after>"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
self.action_open = QtWidgets.QAction('open file', self)
self.action_open.setShortcut('Ctrl+O')
self.action_open.triggered.connect(self.open_file)
self.addAction(self.action_open)
def open_file(self):
filename, _ = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open JSON file')
if filename:
self.editor.file.open(filename)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
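Aside: the added action/shortcut/dialog wiring is plain Qt. The same pattern reduced to stock widgets, as a sketch assuming PyQt5, with pyqode's editor swapped for a QPlainTextEdit:

import sys
from PyQt5 import QtWidgets


class Window(QtWidgets.QMainWindow):
    def __init__(self):
        super(Window, self).__init__()
        self.edit = QtWidgets.QPlainTextEdit(self)
        self.setCentralWidget(self.edit)
        action = QtWidgets.QAction('open file', self)
        action.setShortcut('Ctrl+O')
        action.triggered.connect(self.open_file)
        # The shortcut only fires once the action is added to a widget.
        self.addAction(action)

    def open_file(self):
        filename, _ = QtWidgets.QFileDialog.getOpenFileName(self, 'Open file')
        if filename:
            with open(filename) as f:
                self.edit.setPlainText(f.read())


if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    w = Window()
    w.show()
    sys.exit(app.exec_())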
446738f7615711766952205558fee7ce85ca3a3b
|
MS1/ddp-erlang-style/dna_lib.py
|
MS1/ddp-erlang-style/dna_lib.py
|
__author__ = 'mcsquaredjr'
import os
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
def my_lines(ip):
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip, port = line.split(":")
if ip == str(ip):
line.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1
|
__author__ = 'mcsquaredjr'
import os
import socket
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
itemcount = os.environ["ITEMCOUNT"]
ddp = os.environ["DDP"]
def my_lines(i):
ip = socket.gethostbyname(socket.gethostname())
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip_str, port = line.split(":")
if ip_str == str(ip):
lines.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1 - 2
|
Add more variables and bug fixes
|
Add more variables and bug fixes
|
Python
|
apache-2.0
|
SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC
|
__author__ = 'mcsquaredjr'
import os
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
def my_lines(ip):
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip, port = line.split(":")
if ip == str(ip):
line.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1Add more variables and bug fixes
|
__author__ = 'mcsquaredjr'
import os
import socket
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
itemcount = os.environ["ITEMCOUNT"]
ddp = os.environ["DDP"]
def my_lines(i):
ip = socket.gethostbyname(socket.gethostname())
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip_str, port = line.split(":")
if ip_str == str(ip):
lines.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1 - 2
|
<commit_before>__author__ = 'mcsquaredjr'
import os
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
def my_lines(ip):
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip, port = line.split(":")
if ip == str(ip):
line.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1<commit_msg>Add more variables and bug fixes<commit_after>
|
__author__ = 'mcsquaredjr'
import os
import socket
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
itemcount = os.environ["ITEMCOUNT"]
ddp = os.environ["DDP"]
def my_lines(i):
    ip = socket.gethostbyname(socket.gethostname())
    with open(cad_file, "r") as cad:
        lines = []
        for line in cad:
            ip_str, port = line.split(":")
            if ip_str == str(ip):
                lines.append(line)
    return lines
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1 - 2
|
__author__ = 'mcsquaredjr'
import os
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
def my_lines(ip):
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip, port = line.split(":")
if ip == str(ip):
line.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1Add more variables and bug fixes__author__ = 'mcsquaredjr'
import os
import socket
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
itemcount = os.environ["ITEMCOUNT"]
ddp = os.environ["DDP"]
def my_lines(i):
    ip = socket.gethostbyname(socket.gethostname())
    with open(cad_file, "r") as cad:
        lines = []
        for line in cad:
            ip_str, port = line.split(":")
            if ip_str == str(ip):
                lines.append(line)
    return lines
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1 - 2
|
<commit_before>__author__ = 'mcsquaredjr'
import os
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
def my_lines(ip):
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip, port = line.split(":")
if ip == str(ip):
line.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1<commit_msg>Add more variables and bug fixes<commit_after>__author__ = 'mcsquaredjr'
import os
import socket
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
itemcount = os.environ["ITEMCOUNT"]
ddp = os.environ["DDP"]
def my_lines(i):
    ip = socket.gethostbyname(socket.gethostname())
    with open(cad_file, "r") as cad:
        lines = []
        for line in cad:
            ip_str, port = line.split(":")
            if ip_str == str(ip):
                lines.append(line)
    return lines
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1 - 2
|
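For reference on the environment handling above: the standard library exposes the process environment as os.environ; there is no os.environment attribute, so indexing through it raises AttributeError before any key is even looked up. A minimal sketch of reading such settings defensively (the key names follow the snippet above; the fallback value is illustrative):

import os

nodes = os.environ["NODES"]                         # KeyError if NODES is unset
procs = int(os.environ.get("PROCS_PER_NODE", "1"))  # .get() supplies a default
print(nodes, procs)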
936b20a61fc48960cb21a8ad1c81ca1303151776
|
talkoohakemisto/settings/production.py
|
talkoohakemisto/settings/production.py
|
# -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.net'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
|
# -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.com'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
|
Fix wrong mail server in settings
|
Fix wrong mail server in settings
|
Python
|
mit
|
talkoopaiva/talkoohakemisto-api
|
# -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.net'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
Fix wrong mail server in settings
|
# -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.com'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
|
<commit_before># -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.net'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
<commit_msg>Fix wrong mail server in settings<commit_after>
|
# -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.com'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
|
# -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.net'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
Fix wrong mail server in settings# -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.com'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
|
<commit_before># -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.net'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
<commit_msg>Fix wrong mail server in settings<commit_after># -*- coding: utf-8 -*-
"""
talkoohakemisto.settings.production
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains application settings specific to a production
environment running on Heroku.
"""
import os
from .base import * # flake8: noqa
#
# Generic
# -------
# If a secret key is set, cryptographic components can use this to sign cookies
# and other things. Set this to a complex random value when you want to use the
# secure cookie for instance.
SECRET_KEY = os.environ['SECRET_KEY']
# The debug flag. Set this to True to enable debugging of the application. In
# debug mode the debugger will kick in when an unhandled exception occurs and
# the integrated server will automatically reload the application if changes in
# the code are detected.
DEBUG = 'DEBUG' in os.environ
# Controls if the cookie should be set with the secure flag. Defaults
# to ``False``.
SESSION_COOKIE_SECURE = True
#
# SQLAlchemy
# ----------
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
#
# Email configuration
# -------------------
MAIL_SERVER = 'smtp.mandrillapp.com'
MAIL_USERNAME = os.environ['MANDRILL_USERNAME']
MAIL_PASSWORD = os.environ['MANDRILL_APIKEY']
MAIL_PORT = 587
MAIL_USE_TLS = True
|
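One detail worth noting in these settings: DEBUG is keyed off the mere presence of the variable, not its value, so even DEBUG=0 or an empty string switches debug mode on. A small illustration of the difference (sketch only):

import os

os.environ['DEBUG'] = '0'            # set, but "falsy" as a string
print('DEBUG' in os.environ)         # True  -> presence check enables debug
debug = os.environ.get('DEBUG', '').lower() in ('1', 'true', 'yes')
print(debug)                         # False -> value check stays off for DEBUG=0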
f6013aa29fddf9883f8f0bea4b7733718b9d8846
|
core/admin/migrations/versions/3f6994568962_.py
|
core/admin/migrations/versions/3f6994568962_.py
|
""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
|
""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
|
Fix an old migration that was reading configuration before migrating
|
Fix an old migration that was reading configuration before migrating
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
Fix an old migration that was reading configuration before migrating
|
""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
|
<commit_before>""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
<commit_msg>Fix an old migration that was reading configuration before migrating<commit_after>
|
""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
|
""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
Fix an old migration that was reading configuration before migrating""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
|
<commit_before>""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
<commit_msg>Fix an old migration that was reading configuration before migrating<commit_after>""" Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
|
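Importing the application from inside a migration couples every future alembic run to a fully configured app, which is exactly what the revised script drops. If the conditional backfill were still wanted, one way to keep it without that import is to consult the process environment at migration time; a sketch under that assumption (the FETCHMAIL_KEEP key simply mirrors the original config name):

import os
from alembic import op
import sqlalchemy as sa

fetch_table = sa.table('fetch', sa.column('keep', sa.Boolean()))

def upgrade():
    op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False,
                                     server_default=sa.sql.expression.false()))
    # Opt-in backfill driven by the environment instead of app config.
    if os.environ.get('FETCHMAIL_KEEP') == 'True':
        op.get_bind().execute(fetch_table.update().values(keep=True))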
9e785ef701e4c9d04924eff0ffc9c8d50fa267f6
|
ingestors/email/outlookpst.py
|
ingestors/email/outlookpst.py
|
from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
self.exec_command('readpst',
'-M', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
|
from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
except Exception:
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
raise
|
Make readpst partial output ingest.
|
Make readpst partial output ingest.
|
Python
|
mit
|
alephdata/ingestors
|
from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
self.exec_command('readpst',
'-M', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
Make readpst partial output ingest.
|
from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
except Exception:
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
raise
|
<commit_before>from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
self.exec_command('readpst',
'-M', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
<commit_msg>Make readpst partial output ingest.<commit_after>
|
from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
except Exception:
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
raise
|
from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
self.exec_command('readpst',
'-M', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
Make readpst partial output ingest.from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
except Exception:
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
raise
|
<commit_before>from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
self.exec_command('readpst',
'-M', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
<commit_msg>Make readpst partial output ingest.<commit_after>from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
except Exception:
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
raise
|
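The except branch in the new version implements a salvage-then-reraise pattern: whatever readpst managed to write into temp_dir is still handed to the directory ingestor, and the original exception is re-raised so the failure stays visible upstream. Stripped of the ingestor machinery, the shape is roughly this (function names hypothetical):

import subprocess

def extract_then_ingest(cmd, temp_dir, ingest):
    try:
        subprocess.check_call(cmd)   # may die partway through a large PST
    except Exception:
        ingest(temp_dir)             # salvage the partially extracted messages
        raise                        # still report the failure upstream
    ingest(temp_dir)                 # normal, fully extracted path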
b4c292374175b8623a232bed47e8fa0bef60680b
|
astatsscraper/parsing.py
|
astatsscraper/parsing.py
|
def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
full_url = response.urljoin(relative_url)
self.logger.debug(full_url)
yield scrapy.Request(full_url, callback=self.parse_game_stats)
else:
self.logger.debug('Link ignored ' + relative_url)
|
def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result_for_apps(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
            yield relative_url[len('Steam_Game_Info.php?AppID='):]
|
Fix up parse search func
|
Fix up parse search func
|
Python
|
mit
|
SingingTree/AStatsScraper,SingingTree/AStatsScraper
|
def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
full_url = response.urljoin(relative_url)
self.logger.debug(full_url)
yield scrapy.Request(full_url, callback=self.parse_game_stats)
else:
self.logger.debug('Link ignored ' + relative_url)Fix up parse search func
|
def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result_for_apps(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
            yield relative_url[len('Steam_Game_Info.php?AppID='):]
|
<commit_before>def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
full_url = response.urljoin(relative_url)
self.logger.debug(full_url)
yield scrapy.Request(full_url, callback=self.parse_game_stats)
else:
self.logger.debug('Link ignored ' + relative_url)<commit_msg>Fix up parse search func<commit_after>
|
def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result_for_apps(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
            yield relative_url[len('Steam_Game_Info.php?AppID='):]
|
def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
full_url = response.urljoin(relative_url)
self.logger.debug(full_url)
yield scrapy.Request(full_url, callback=self.parse_game_stats)
else:
self.logger.debug('Link ignored ' + relative_url)Fix up parse search funcdef parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result_for_apps(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
            yield relative_url[len('Steam_Game_Info.php?AppID='):]
|
<commit_before>def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
full_url = response.urljoin(relative_url)
self.logger.debug(full_url)
yield scrapy.Request(full_url, callback=self.parse_game_stats)
else:
self.logger.debug('Link ignored ' + relative_url)<commit_msg>Fix up parse search func<commit_after>def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result_for_apps(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
            yield relative_url[len('Steam_Game_Info.php?AppID='):]
|
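The slice direction is the crux in parse_search_result_for_apps: for a string s and prefix p, s[:len(p)] returns the prefix itself, while s[len(p):] returns everything after it, which is what an AppID extractor needs. A quick check:

prefix = 'Steam_Game_Info.php?AppID='
url = prefix + '12345'
print(url[:len(prefix)])   # 'Steam_Game_Info.php?AppID=' -- just the constant
print(url[len(prefix):])   # '12345' -- the app id
# On Python 3.9+, url.removeprefix(prefix) spells the same thing more clearly.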
43d283cb4cb01ec919f9129571a51594e699fcf5
|
src/gogoutils/formats.py
|
src/gogoutils/formats.py
|
"""Determine the generator format"""
from collections import ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
|
"""Determine the generator format"""
try:
from collections import ChainMap
except ImportError:
from ConfigParser import _Chainmap as ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
|
Add Chainmap support for python2
|
Add Chainmap support for python2
|
Python
|
apache-2.0
|
gogoair/gogo-utils
|
"""Determine the generator format"""
from collections import ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
Add Chainmap support for python2
|
"""Determine the generator format"""
try:
from collections import ChainMap
except ImportError:
from ConfigParser import _Chainmap as ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
|
<commit_before>"""Determine the generator format"""
from collections import ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
<commit_msg>Add Chainmap support for python2<commit_after>
|
"""Determine the generator format"""
try:
from collections import ChainMap
except ImportError:
from ConfigParser import _Chainmap as ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
|
"""Determine the generator format"""
from collections import ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
Add Chainmap support for python2"""Determine the generator format"""
try:
from collections import ChainMap
except ImportError:
from ConfigParser import _Chainmap as ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
|
<commit_before>"""Determine the generator format"""
from collections import ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
<commit_msg>Add Chainmap support for python2<commit_after>"""Determine the generator format"""
try:
from collections import ChainMap
except ImportError:
from ConfigParser import _Chainmap as ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
|
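ChainMap resolves lookups left to right, so the caller's config shadows DEFAULT_FORMAT without copying either dict; the Python 2 fallback leans on ConfigParser's private _Chainmap, which provides the same read-side behavior. A small illustration (shown against the Python 3 collections.ChainMap):

from collections import ChainMap

defaults = {'domain': 'example.com', 'app': '{repo}{project}'}
overrides = {'domain': 'internal.test'}
cfg = ChainMap(overrides, defaults)
print(cfg['domain'])   # 'internal.test' -- first mapping wins
print(cfg['app'])      # '{repo}{project}' -- falls through to the defaults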
a75ff3a9d9b86ea71fbc582641ea943c282bfe2d
|
analyser/api.py
|
analyser/api.py
|
from flask.ext.classy import FlaskView
class AnalyserView(FlaskView):
def get(self):
return "awesome"
|
from flask.ext.classy import FlaskView
from utils.decorators import validate, require
from utils.validators import validate_url
class AnalyserView(FlaskView):
@require('url')
@validate({
'url': validate_url
})
def post(self, url):
return url
|
Add more joy using decorators
|
Add more joy using decorators
|
Python
|
apache-2.0
|
vtemian/kruncher
|
from flask.ext.classy import FlaskView
class AnalyserView(FlaskView):
def get(self):
return "awesome"
Add more joy using decorators
|
from flask.ext.classy import FlaskView
from utils.decorators import validate, require
from utils.validators import validate_url
class AnalyserView(FlaskView):
@require('url')
@validate({
'url': validate_url
})
def post(self, url):
return url
|
<commit_before>from flask.ext.classy import FlaskView
class AnalyserView(FlaskView):
def get(self):
return "awesome"
<commit_msg>Add more joy using decorators<commit_after>
|
from flask.ext.classy import FlaskView
from utils.decorators import validate, require
from utils.validators import validate_url
class AnalyserView(FlaskView):
@require('url')
@validate({
'url': validate_url
})
def post(self, url):
return url
|
from flask.ext.classy import FlaskView
class AnalyserView(FlaskView):
def get(self):
return "awesome"
Add more joy using decorators
from flask.ext.classy import FlaskView
from utils.decorators import validate, require
from utils.validators import validate_url
class AnalyserView(FlaskView):
@require('url')
@validate({
'url': validate_url
})
def post(self, url):
return url
|
<commit_before>from flask.ext.classy import FlaskView
class AnalyserView(FlaskView):
def get(self):
return "awesome"
<commit_msg>Add more joy using decorators<commit_after>from flask.ext.classy import FlaskView
from utils.decorators import validate, require
from utils.validators import validate_url
class AnalyserView(FlaskView):
@require('url')
@validate({
'url': validate_url
})
def post(self, url):
return url
|
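utils.decorators and utils.validators are imported but not included in this record, so the decorators below are only a plausible reconstruction, not the repo's actual code: require pulls named fields out of the request body into keyword arguments, validate runs a per-field check, and both reject with a 400 on failure.

from functools import wraps
from flask import request, abort

def require(*fields):
    # Hypothetical: 400 unless every named field arrives in the request body.
    def decorator(fn):
        @wraps(fn)
        def wrapper(self, *args, **kwargs):
            data = request.form or request.get_json(silent=True) or {}
            missing = [f for f in fields if f not in data]
            if missing:
                abort(400, 'missing: ' + ', '.join(missing))
            kwargs.update((f, data[f]) for f in fields)
            return fn(self, *args, **kwargs)
        return wrapper
    return decorator

def validate(validators):
    # Hypothetical: run each field's validator; 400 on the first failure.
    def decorator(fn):
        @wraps(fn)
        def wrapper(self, *args, **kwargs):
            for field, check in validators.items():
                if field in kwargs and not check(kwargs[field]):
                    abort(400, 'invalid: ' + field)
            return fn(self, *args, **kwargs)
        return wrapper
    return decorator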
402c010b6ab4673ae3b5c684b8e0c155ec98b172
|
gentle/gt/operations.py
|
gentle/gt/operations.py
|
#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(green(service + "start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + "fail..."))
continue
print(green(service + "end..."))
|
#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green, yellow
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(yellow(service) + ": " + green("start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + ": fail..."))
continue
print(yellow(service) + ": " + green("end..."))
|
Add yellow color for services
|
Add yellow color for services
|
Python
|
apache-2.0
|
dongweiming/gentle
|
#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(green(service + "start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + "fail..."))
continue
print(green(service + "end..."))
Add yellow color for services
|
#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green, yellow
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(yellow(service) + ": " + green("start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + ": fail..."))
continue
print(yellow(service) + ": " + green("end..."))
|
<commit_before>#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(green(service + "start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + "fail..."))
continue
print(green(service + "end..."))
<commit_msg>Add yellow color for services<commit_after>
|
#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green, yellow
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(yellow(service) + ": " + green("start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + ": fail..."))
continue
print(yellow(service) + ": " + green("end..."))
|
#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(green(service + "start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + "fail..."))
continue
print(green(service + "end..."))
Add yellow color for services
#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green, yellow
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(yellow(service) + ": " + green("start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + ": fail..."))
continue
print(yellow(service) + ": " + green("end..."))
|
<commit_before>#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(green(service + "start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + "fail..."))
continue
print(green(service + "end..."))
<commit_msg>Add yellow color for services<commit_after>#coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green, yellow
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(yellow(service) + ": " + green("start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + ": fail..."))
continue
print(yellow(service) + ": " + green("end..."))
|
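restart() dictates the shape of env.services through its lookups (lpath/rpath to rsync, sudo, user, command), but no real config ships with this record. Here is an illustrative entry only, with invented service names and paths:

from fabric.state import env

# Illustrative only: one service entry with the keys restart() reads.
env.services = {
    'web': {
        'lpath': 'conf/web/',              # local dir rsynced first
        'rpath': '/etc/web/',              # remote destination
        'sudo': True,                      # run the command via sudo ...
        'user': 'deploy',                  # ... as this user (else env.user)
        'command': 'service web restart',
    },
}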
7a804eac3f354a778eda3daa8cd5f88b09259f74
|
south/signals.py
|
south/signals.py
|
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
if 'django.contrib.auth' in settings.INSTALLED_APPS:
def create_permissions_compat(app, **kwargs):
from django.db.models import get_app
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), (), 0)
post_migrate.connect(create_permissions_compat)
|
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
#if 'django.contrib.auth' in settings.INSTALLED_APPS:
#def create_permissions_compat(app, **kwargs):
#from django.db.models import get_app
#from django.contrib.auth.management import create_permissions
#create_permissions(get_app(app), (), 0)
#post_migrate.connect(create_permissions_compat)
|
Remove the auth contenttypes thing for now, needs improvement
|
Remove the auth contenttypes thing for now, needs improvement
|
Python
|
apache-2.0
|
smartfile/django-south,smartfile/django-south
|
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
if 'django.contrib.auth' in settings.INSTALLED_APPS:
def create_permissions_compat(app, **kwargs):
from django.db.models import get_app
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), (), 0)
post_migrate.connect(create_permissions_compat)
Remove the auth contenttypes thing for now, needs improvement
|
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
#if 'django.contrib.auth' in settings.INSTALLED_APPS:
#def create_permissions_compat(app, **kwargs):
#from django.db.models import get_app
#from django.contrib.auth.management import create_permissions
#create_permissions(get_app(app), (), 0)
#post_migrate.connect(create_permissions_compat)
|
<commit_before>"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
if 'django.contrib.auth' in settings.INSTALLED_APPS:
def create_permissions_compat(app, **kwargs):
from django.db.models import get_app
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), (), 0)
post_migrate.connect(create_permissions_compat)
<commit_msg>Remove the auth contenttypes thing for now, needs improvement<commit_after>
|
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
#if 'django.contrib.auth' in settings.INSTALLED_APPS:
#def create_permissions_compat(app, **kwargs):
#from django.db.models import get_app
#from django.contrib.auth.management import create_permissions
#create_permissions(get_app(app), (), 0)
#post_migrate.connect(create_permissions_compat)
|
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
if 'django.contrib.auth' in settings.INSTALLED_APPS:
def create_permissions_compat(app, **kwargs):
from django.db.models import get_app
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), (), 0)
post_migrate.connect(create_permissions_compat)
Remove the auth contenttypes thing for now, needs improvement
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
#if 'django.contrib.auth' in settings.INSTALLED_APPS:
#def create_permissions_compat(app, **kwargs):
#from django.db.models import get_app
#from django.contrib.auth.management import create_permissions
#create_permissions(get_app(app), (), 0)
#post_migrate.connect(create_permissions_compat)
|
<commit_before>"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
if 'django.contrib.auth' in settings.INSTALLED_APPS:
def create_permissions_compat(app, **kwargs):
from django.db.models import get_app
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), (), 0)
post_migrate.connect(create_permissions_compat)
<commit_msg>Remove the auth contenttypes thing for now, needs improvement<commit_after>"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
#if 'django.contrib.auth' in settings.INSTALLED_APPS:
#def create_permissions_compat(app, **kwargs):
#from django.db.models import get_app
#from django.contrib.auth.management import create_permissions
#create_permissions(get_app(app), (), 0)
#post_migrate.connect(create_permissions_compat)
|
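For reference, wiring a receiver to the post_migrate signal declared above follows the same pattern as the now-commented-out auth hook: the app name arrives as a keyword argument. Python 2 era code, matching the record:

from south.signals import post_migrate

def announce(app, **kwargs):
    # Runs once per app after its migrations complete.
    print 'migrated %s' % app

post_migrate.connect(announce)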
8b1d878aff4168d74437d3ba0cfaf8307e7c377d
|
consts/model_type.py
|
consts/model_type.py
|
class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
AWARD = 5
MEDIA = 6
|
class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
DISTRICT_TEAM = 5
AWARD = 6
MEDIA = 7
|
Update model enums to match app
|
Update model enums to match app
|
Python
|
mit
|
josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,1fish2/the-blue-alliance
|
class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
AWARD = 5
MEDIA = 6
Update model enums to match app
|
class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
DISTRICT_TEAM = 5
AWARD = 6
MEDIA = 7
|
<commit_before>class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
AWARD = 5
MEDIA = 6
<commit_msg>Update model enums to match app<commit_after>
|
class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
DISTRICT_TEAM = 5
AWARD = 6
MEDIA = 7
|
class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
AWARD = 5
MEDIA = 6
Update model enums to match app
class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
DISTRICT_TEAM = 5
AWARD = 6
MEDIA = 7
|
<commit_before>class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
AWARD = 5
MEDIA = 6
<commit_msg>Update model enums to match app<commit_after>class ModelType(object):
"""
Enums for the different model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
DISTRICT_TEAM = 5
AWARD = 6
MEDIA = 7
|
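The docstring's append-only rule presumably exists because these integers are persisted and, per the subject line, mirrored in a client app. A small reverse-lookup sketch built on the enum above makes the values easy to audit:

from consts.model_type import ModelType

# Map value -> name; works on the plain-class enum style used above.
TYPE_NAMES = dict(
    (v, k) for k, v in vars(ModelType).items() if not k.startswith('_'))

assert TYPE_NAMES[5] == 'DISTRICT_TEAM'  # the newly inserted member
assert TYPE_NAMES[7] == 'MEDIA'          # shifted from 6 by this commit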
4fb3a127706d7ff7ead0d2d8b698183905d85d4e
|
dependency_injector/__init__.py
|
dependency_injector/__init__.py
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .errors import Error
__all__ = ('AbstractCatalog',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
# Decorators
'override',
'inject',
# Errors
'Error')
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
|
Add additional shortcuts for top level package
|
Add additional shortcuts for top level package
|
Python
|
bsd-3-clause
|
ets-labs/python-dependency-injector,ets-labs/dependency_injector,rmk135/dependency_injector,rmk135/objects
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .errors import Error
__all__ = ('AbstractCatalog',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
# Decorators
'override',
'inject',
# Errors
'Error')
Add additional shortcuts for top level package
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
|
<commit_before>"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .errors import Error
__all__ = ('AbstractCatalog',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
# Decorators
'override',
'inject',
# Errors
'Error')
<commit_msg>Add additional shortcuts for top level package<commit_after>
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .errors import Error
__all__ = ('AbstractCatalog',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
# Decorators
'override',
'inject',
# Errors
'Error')
Add additional shortcuts for top level package
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
|
<commit_before>"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .errors import Error
__all__ = ('AbstractCatalog',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
# Decorators
'override',
'inject',
# Errors
'Error')
<commit_msg>Add additional shortcuts for top level package<commit_after>"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
|
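A rough usage sketch for the newly re-exported helpers; the exact provider API of this dependency_injector vintage is assumed here rather than verified:

from dependency_injector import Factory, is_provider, ensure_is_provider

factory = Factory(dict)            # a provider that builds dicts on call
assert is_provider(factory)        # predicate: True for provider instances
assert not is_provider(dict)       # plain classes are not providers
ensure_is_provider(factory)        # same check, but raises Error on failure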
b61bf7dbdb26b6ff3e76f10173ffb94a76cd4f4e
|
lego.py
|
lego.py
|
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = []
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if len(self.rect) == 4:
self.rect = []
self.rect.append((x, y))
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
|
import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def has_roi(self):
return self.rect_index == 3
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index][0] = x
self.rect[self.rect_index][1] = y
if self.has_roi():
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
|
Use numpy array for ROI
|
Use numpy array for ROI
|
Python
|
mit
|
superquadratic/beat-bricks
|
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = []
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if len(self.rect) == 4:
self.rect = []
self.rect.append((x, y))
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
Use numpy array for ROI
|
import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def has_roi(self):
return self.rect_index == 3
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index][0] = x
self.rect[self.rect_index][1] = y
if self.has_roi():
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
|
<commit_before>import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = []
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if len(self.rect) == 4:
self.rect = []
self.rect.append((x, y))
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
<commit_msg>Use numpy array for ROI<commit_after>
|
import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def has_roi(self):
return self.rect_index == 3
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index][0] = x
self.rect[self.rect_index][1] = y
if self.has_roi():
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
|
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = []
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if len(self.rect) == 4:
self.rect = []
self.rect.append((x, y))
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
Use numpy array for ROI
import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def has_roi(self):
return self.rect_index == 3
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index][0] = x
self.rect[self.rect_index][1] = y
if self.has_roi():
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
|
<commit_before>import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = []
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if len(self.rect) == 4:
self.rect = []
self.rect.append((x, y))
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
<commit_msg>Use numpy array for ROI<commit_after>import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def has_roi(self):
return self.rect_index == 3
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index][0] = x
self.rect[self.rect_index][1] = y
if self.has_roi():
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
|
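One likely payoff of keeping the ROI as a numeric array: the four clicked corners can feed a perspective warp directly. The record never shows that step, so this is speculative; OpenCV expects float32 corners, and the ordering assumed here (tl, tr, br, bl) must match the click order.

import numpy as np
import cv2

def warp_roi(frame, rect, size=(320, 240)):
    # rect: the 4x2 corner array collected by on_mouse, assumed ordered
    # top-left, top-right, bottom-right, bottom-left.
    w, h = size
    dst = np.array([[0, 0], [w - 1, 0], [w - 1, h - 1], [0, h - 1]],
                   dtype=np.float32)
    matrix = cv2.getPerspectiveTransform(rect.astype(np.float32), dst)
    return cv2.warpPerspective(frame, matrix, size)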
da75222fa286588394da7f689d47bd53716ffaa1
|
coverage/execfile.py
|
coverage/execfile.py
|
"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module("__main__")
sys.modules['__main__'] = main_mod
main_mod.__dict__.update({
'__name__': '__main__',
'__file__': filename,
})
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
|
"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
|
Simplify the construction of the __main__ module in run_python_file.
|
Simplify the construction of the __main__ module in run_python_file.
|
Python
|
apache-2.0
|
blueyed/coveragepy,nedbat/coveragepy,larsbutler/coveragepy,blueyed/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,larsbutler/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,7WebPages/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,hugovk/coveragepy,blueyed/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,hugovk/coveragepy,blueyed/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,nedbat/coveragepy,hugovk/coveragepy,nedbat/coveragepy
|
"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module("__main__")
sys.modules['__main__'] = main_mod
main_mod.__dict__.update({
'__name__': '__main__',
'__file__': filename,
})
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
Simplify the construction of the __main__ module in run_python_file.
|
"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
|
<commit_before>"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module("__main__")
sys.modules['__main__'] = main_mod
main_mod.__dict__.update({
'__name__': '__main__',
'__file__': filename,
})
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
<commit_msg>Simplify the construction of the __main__ module in run_python_file.<commit_after>
|
"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
|
"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module("__main__")
sys.modules['__main__'] = main_mod
main_mod.__dict__.update({
'__name__': '__main__',
'__file__': filename,
})
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
Simplify the construction of the __main__ module in run_python_file.
"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
|
<commit_before>"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module("__main__")
sys.modules['__main__'] = main_mod
main_mod.__dict__.update({
'__name__': '__main__',
'__file__': filename,
})
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
<commit_msg>Simplify the construction of the __main__ module in run_python_file.<commit_after>"""Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
|
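Calling convention for the function above, for reference: the args list includes the script itself as its first element, exactly as the docstring specifies, so the target program sees a normal sys.argv. The script path below is made up.

from coverage.execfile import run_python_file

# Runs scripts/report.py as if invoked as `python scripts/report.py --fast`;
# sys.argv, sys.path[0] and the real __main__ are restored afterwards.
run_python_file('scripts/report.py', ['scripts/report.py', '--fast'])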
64d7fb0b9ae9e14447a236a51e27b033aee20219
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^registration/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^accounts/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
|
Move pages from registration to accounts/
|
Move pages from registration to accounts/
|
Python
|
agpl-3.0
|
pu239ppy/authentic2,BryceLohr/authentic,pu239ppy/authentic2,incuna/authentic,adieu/authentic2,pu239ppy/authentic2,pu239ppy/authentic2,incuna/authentic,adieu/authentic2,adieu/authentic2,BryceLohr/authentic,BryceLohr/authentic,incuna/authentic,incuna/authentic,adieu/authentic2,incuna/authentic,BryceLohr/authentic
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^registration/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
Move pages from registration to accounts/
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^accounts/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
|
<commit_before>from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^registration/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
<commit_msg>Move pages from registration to accounts/<commit_after>
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^accounts/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^registration/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
Move pages from registration to accounts/from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^accounts/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
|
<commit_before>from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^registration/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
<commit_msg>Move pages from registration to accounts/<commit_after>from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^accounts/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
|
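After this change, django-registration's views resolve under the /accounts/ prefix instead of /registration/. A quick sanity check of the new routing, run inside the project; the URL name below is the one django-registration conventionally provides and is assumed here, not taken from this record:

# Assumes django-registration's default URL names; Django of the
# conf.urls.defaults era, matching the code above.
from django.core.urlresolvers import reverse

print(reverse('registration_register'))  # expected: /accounts/register/
print(reverse('index'))                  # expected: /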
96884e4c35b89cb1f63a6249c9c24e27894a3752
|
tacker/db/api.py
|
tacker/db/api.py
|
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
|
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
|
Remove unused LOG to keep code clean
|
Remove unused LOG to keep code clean
TrivialFix
Change-Id: I21fa9ebda98005c377d395f498cb44cf6599f0e5
|
Python
|
apache-2.0
|
stackforge/tacker,zeinsteinz/tacker,stackforge/tacker,openstack/tacker,openstack/tacker,openstack/tacker,zeinsteinz/tacker
|
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
Remove unused LOG to keep code clean
TrivialFix
Change-Id: I21fa9ebda98005c377d395f498cb44cf6599f0e5
|
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
|
<commit_before># Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
<commit_msg>Remove unused LOG to keep code clean
TrivialFix
Change-Id: I21fa9ebda98005c377d395f498cb44cf6599f0e5<commit_after>
|
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
|
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
Remove unused LOG to keep code clean
TrivialFix
Change-Id: I21fa9ebda98005c377d395f498cb44cf6599f0e5# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
|
<commit_before># Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
<commit_msg>Remove unused LOG to keep code clean
TrivialFix
Change-Id: I21fa9ebda98005c377d395f498cb44cf6599f0e5<commit_after># Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
|
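The facade in this module is built once, lazily, and every caller goes through the two helpers rather than touching enginefacade directly. A sketch of a typical call site; SomeModel is a stand-in, not a real tacker model:

# Illustrative consumption of the helpers above; SomeModel is hypothetical.
from tacker.db import api as db_api

engine = db_api.get_engine()           # first call builds the facade
session = db_api.get_session()         # reuses the same facade afterwards
rows = session.query(SomeModel).all()  # ordinary SQLAlchemy session usage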
50bdb59f7629b60d6aa6c9f3f21b447f00476b19
|
webmanager/views_oauth2.py
|
webmanager/views_oauth2.py
|
from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
user_instance = user_access_token.user # User.objects.get(username=user_access_token.user)
user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user_instance)
return HttpResponseRedirect(target)
|
from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
from djangoautoconf.req_with_auth import login_by_django_user
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
django_user_instance = user_access_token.user
login_by_django_user(django_user_instance, request)
return HttpResponseRedirect(target)
|
Move user login code to djangoautoconf.
|
Move user login code to djangoautoconf.
|
Python
|
bsd-3-clause
|
weijia/webmanager,weijia/webmanager,weijia/webmanager
|
from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
user_instance = user_access_token.user # User.objects.get(username=user_access_token.user)
user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user_instance)
return HttpResponseRedirect(target)
Move user login code to djangoautoconf.
|
from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
from djangoautoconf.req_with_auth import login_by_django_user
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
django_user_instance = user_access_token.user
login_by_django_user(django_user_instance, request)
return HttpResponseRedirect(target)
|
<commit_before>from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
user_instance = user_access_token.user # User.objects.get(username=user_access_token.user)
user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user_instance)
return HttpResponseRedirect(target)
<commit_msg>Move user login code to djangoautoconf.<commit_after>
|
from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
from djangoautoconf.req_with_auth import login_by_django_user
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
django_user_instance = user_access_token.user
login_by_django_user(django_user_instance, request)
return HttpResponseRedirect(target)
|
from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
user_instance = user_access_token.user # User.objects.get(username=user_access_token.user)
user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user_instance)
return HttpResponseRedirect(target)
Move user login code to djangoautoconf.from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
from djangoautoconf.req_with_auth import login_by_django_user
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
django_user_instance = user_access_token.user
login_by_django_user(django_user_instance, request)
return HttpResponseRedirect(target)
|
<commit_before>from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
user_instance = user_access_token.user # User.objects.get(username=user_access_token.user)
user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user_instance)
return HttpResponseRedirect(target)
<commit_msg>Move user login code to djangoautoconf.<commit_after>from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
from djangoautoconf.req_with_auth import login_by_django_user
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
django_user_instance = user_access_token.user
login_by_django_user(django_user_instance, request)
return HttpResponseRedirect(target)
|
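The helper moved into djangoautoconf presumably wraps what the view previously did inline: stamping a backend onto the user object and calling django.contrib.auth.login. A plausible shape of that helper, inferred from the code it replaces rather than taken from the actual djangoautoconf source:

# Guessed implementation of login_by_django_user; the real djangoautoconf
# version may differ.
from django.contrib.auth import login

def login_by_django_user(django_user_instance, request):
    # login() requires a backend attribute on the user instance.
    django_user_instance.backend = "django.contrib.auth.backends.ModelBackend"
    login(request, django_user_instance)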