Dataset columns (string length statistics from the dataset preview):

| Column          | Type              | Length (min / max) |
|-----------------|-------------------|--------------------|
| commit          | string            | 40 / 40            |
| old_file        | string            | 4 / 118            |
| new_file        | string            | 4 / 118            |
| old_contents    | string            | 0 / 2.94k          |
| new_contents    | string            | 1 / 4.43k          |
| subject         | string            | 15 / 444           |
| message         | string            | 16 / 3.45k         |
| lang            | class (1 value)   | n/a                |
| license         | class (13 values) | n/a                |
| repos           | string            | 5 / 43.2k          |
| prompt          | string            | 17 / 4.58k         |
| response        | string            | 1 / 4.43k          |
| prompt_tagged   | string            | 58 / 4.62k         |
| response_tagged | string            | 1 / 4.43k          |
| text            | string            | 132 / 7.29k        |
| text_tagged     | string            | 173 / 7.33k        |
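The rows below are easier to explore programmatically than in a flattened preview. A minimal sketch using the Hugging Face `datasets` library, assuming the data is hosted on the Hub; the identifier `user/commit-dataset` is a placeholder, not the real repository name:

```python
from datasets import load_dataset

# "user/commit-dataset" is a hypothetical identifier - substitute the real one.
ds = load_dataset("user/commit-dataset", split="train")

row = ds[0]
print(row["commit"])    # 40-character commit SHA
print(row["old_file"])  # path of the file before the change
print(row["subject"])   # first line of the commit message
```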
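The six prompt/response/text columns are concatenations of the base fields; the tagged variants wrap them in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. The sketch below is inferred from the sample rows rather than from an official spec, so treat the exact concatenation (in particular any separator whitespace) as an assumption:

```python
def derive_columns(old_contents, message, new_contents):
    # Inferred from the sample rows: prompt = old code followed by the commit
    # message, response = new code, text = prompt + response. Any separator
    # whitespace is not recoverable from the flattened preview.
    prompt = old_contents + message
    prompt_tagged = ('<commit_before>' + old_contents +
                     '<commit_msg>' + message + '<commit_after>')
    return {
        'prompt': prompt,
        'response': new_contents,
        'prompt_tagged': prompt_tagged,
        'response_tagged': new_contents,
        'text': prompt + new_contents,
        'text_tagged': prompt_tagged + new_contents,
    }
```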
Sample rows:

commit: 4bc31e675659af54ee26fe5df16a0ee3ebeb5947
old_file / new_file: firefed/__main__.py
lang: Python | license: mit | repos: numirias/firefed
subject: Add default argument for profile
message: Add default argument for profile

old_contents:

```python
import argparse
import os
import re

from firefed import Firefed
from feature import feature_map, Summary


def feature_type(val):
    try:
        return feature_map()[val]
    except KeyError as key:
        raise argparse.ArgumentTypeError(
            'Feature %s not found. Choose from: {%s}' %
            (key, ', '.join(feature_map())))


def profile_dir(dirname):
    if dirname is None:
        dirname = 'default'
    if os.path.isdir(dirname):
        return dirname
    if re.match('^[\\w-]+$', dirname):
        home = os.path.expanduser('~/.mozilla/firefox')
        profile_names = os.listdir(home)
        for name in profile_names:
            if name.endswith('.%s' % dirname):
                return os.path.join(home, name)
    raise argparse.ArgumentTypeError('Profile %s not found.' % dirname)


def main():
    parser = argparse.ArgumentParser(
        'firefed',
        description=
        'Firefed is a Firefox profile analyzer focusing on privacy and security.',
    )
    parser.add_argument(
        '-p',
        '--profile',
        help='profile name or directory',
        type=profile_dir,
        required=True)
    parser.add_argument(
        '-f',
        '--feature',
        type=feature_type,
        default=Summary,
        help='{%s}' % ', '.join(feature_map()))
    parser.add_argument(
        '-s', '--summarize', action='store_true', help='summarize results')
    args = parser.parse_args()
    Firefed(args)


if __name__ == '__main__':
    main()
```

new_contents: identical to old_contents except for the `--profile` argument, which is no longer required and instead defaults to 'default':

```python
    parser.add_argument(
        '-p',
        '--profile',
        help='profile name or directory',
        type=profile_dir,
        default='default')
```
commit: 138df31dc628daad0c60f062b05774d6c7d4338d
old_file / new_file: src/kuas_api/modules/const.py
lang: Python | license: mit
repos: JohnSounder/AP-API,kuastw/AP-API,kuastw/AP-API,JohnSounder/AP-API
subject: Change android version to 2.1.3
message: Change android version to 2.1.3

old_contents:

```python
#-*- coding: utf-8 -*-

device_version = {
    "android": "2.1.2",
    "android_donate": "2.1.2",
    "ios": "1.4.3"
}

# Token duration in seconds
token_duration = 3600

# HTTP Status Code
ok = 200
no_content = 204
```

new_contents:

```python
#-*- coding: utf-8 -*-

device_version = {
    "android": "2.1.3",
    "android_donate": "2.1.2",
    "ios": "1.6.0"
}

# Token duration in seconds
token_duration = 3600

serect_key = "usapoijupojfa;dsj;lv;ldakjads;lfkjapoiuewqprjf"

# HTTP Status Code
ok = 200
no_content = 204
```
commit: 1d9e9f2b7a2259f19a48ab0e0f41439ba5224648
old_file / new_file: src/adhocracy/lib/auth/shibboleth.py
lang: Python | license: agpl-3.0
repos: DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,liqd/adhocracy,alkadis/vcv,SysTheron/adhocracy,phihag/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,liqd/adhocracy,SysTheron/adhocracy,liqd/adhocracy,phihag/adhocracy,SysTheron/adhocracy,phihag/adhocracy,phihag/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy
subject: Add userbade mappings contains and contains_substring
message: Add userbade mappings contains and contains_substring

old_contents:

```python
from pylons import config


def get_userbadge_mapping(config=config):
    mapping = config.get('adhocracy.shibboleth.userbadge_mapping', u'')
    return (line.strip().split(u' ')
            for line in mapping.strip().split(u'\n')
            if line is not u'')


def _attribute_equals(request, key, value):
    return request.headers.get(key) == value


USERBADGE_MAPPERS = {
    'attribute_equals': _attribute_equals,
}
```

new_contents:

```python
from pylons import config


def get_userbadge_mapping(config=config):
    mapping = config.get('adhocracy.shibboleth.userbadge_mapping', u'')
    return (line.strip().split(u' ')
            for line in mapping.strip().split(u'\n')
            if line is not u'')


def _attribute_equals(request, key, value):
    """ exact match """
    return request.headers.get(key) == value


def _attribute_contains(request, key, value):
    """ contains element """
    elements = (e.strip() for e in request.headers.get(key).split(','))
    return value in elements


def _attribute_contains_substring(request, key, value):
    """ contains substring """
    return value in request.headers.get(key)


USERBADGE_MAPPERS = {
    'attribute_equals': _attribute_equals,
    'attribute_contains': _attribute_contains,
    'attribute_contains_substring': _attribute_contains_substring,
}
```
commit: bd6c8b0354e9a32c47593ea19d09789d2a36912f
old_file / new_file: conanfile.py
lang: Python | license: apache-2.0
repos: hanickadot/compile-time-regular-expressions,hanickadot/compile-time-regular-expressions,hanickadot/compile-time-regular-expressions,hanickadot/syntax-parser
subject: Use UTF-8 in Conan recipe
message:
  Use UTF-8 in Conan recipe

  - Force python interpreter to use UTF-8

  Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>

old_contents:

```python
from conans import ConanFile


class CtreConan(ConanFile):
    name = "CTRE"
    version = "2.0"
    license = "MIT"
    url = "https://github.com/hanickadot/compile-time-regular-expressions.git"
    author = "Hana Dusíková (ctre@hanicka.net)"
    description = "Compile Time Regular Expression for C++17/20"
    homepage = "https://github.com/hanickadot/compile-time-regular-expressions"
    exports = "LICENSE"
    exports_sources = "include/*"
    no_copy_source = True

    def package(self):
        self.copy("*.hpp")

    def package_id(self):
        self.info.header_only()
```

new_contents: identical to old_contents except for two new header lines at the top of the file:

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile
```
commit: ae424937a7d9341862329cf7f04bd91ccdf345cd
old_file / new_file: converter.py
lang: Python | license: mit | repos: SkullTech/json-csv-converter
subject: Add command-line arguments and main function
message: Add command-line arguments and main function

old_contents:

```python
import json
import csv


def json_to_csv(json_file):
    with open(json_file, 'r') as jsonfile, open('output.csv', 'w', newline='') as csvfile:
        jsn = json.load(jsonfile)
        fieldnames = []
        for name in jsn[0]:
            fieldnames += [name]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for elem in jsn:
            writer.writerow(elem)


def csv_to_json(csv_file):
    with open(csv_file, 'r') as csvfile, open('output.json', 'w') as jsonfile:
        reader = csv.DictReader(csvfile)
        jsn = []
        for row in reader:
            jsn += [row]
        json.dump(jsn, jsonfile)


filename = input('Enter filename of CSV file: ')
csv_to_json(filename)
```

new_contents:

```python
import json
import csv
import argparse


def json_to_csv(json_file):
    with open(json_file, 'r') as jsonfile, open('output.csv', 'w', newline='') as csvfile:
        jsn = json.load(jsonfile)
        fieldnames = []
        for name in jsn[0]:
            fieldnames += [name]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for elem in jsn:
            writer.writerow(elem)


def csv_to_json(csv_file):
    with open(csv_file, 'r') as csvfile, open('output.json', 'w') as jsonfile:
        reader = csv.DictReader(csvfile)
        jsn = []
        for row in reader:
            jsn += [row]
        json.dump(jsn, jsonfile)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('inputtype', help='The type of input', type=str, choices=['csv', 'json'])
    parser.add_argument('filename', help='Name of the input file', type=str)
    args = parser.parse_args()

    if (args.inputtype=='csv'):
        csv_to_json(args.filename)
        outputtype = 'json'
    elif (args.inputtype=='json'):
        json_to_csv(args.filename)
        outputtype = 'csv'

    print('[*] Output saved as output.{}'.format(outputtype))


if __name__=='__main__':
    main()
```
commit: 6a5413ce81a606476734d9b37b33f683ed0c85e3
old_file / new_file: cards/card.py
lang: Python | license: mit
repos: johnpapa2/twenty-one,johnpapa2/twenty-one
subject: Switch to pre-python 3.6 string formatting for Codeship
message: Switch to pre-python 3.6 string formatting for Codeship

old_contents:

```python
"""
Created on Dec 04, 2016

@author: john papa

Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
from abc import ABCMeta, abstractproperty


class Card(metaclass=ABCMeta):

    def __init__(self, suit, rank):
        self._rank = rank
        self._suit = suit
        self._value = None

    def __str__(self):
        return f"{self._rank} of {self._suit}"

    @property
    def rank(self):
        return self._rank

    @property
    def suit(self):
        return self._suit

    @abstractproperty
    def value(self):
        """ Returns the value of the card used for scoring the game """
```

new_contents: identical to old_contents except for the `__str__` method, where the f-string is commented out in favor of `str.format`:

```python
    def __str__(self):
        # return f"{self.rank} of {self.suit}"
        return "{0} of {1}".format(self.rank, self.suit)
```
""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): return f"{self._rank} of {self._suit}" @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """ Switch to pre-python 3.6 string formatting for Codeship
""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): # return f"{self.rank} of {self.suit}" return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
<commit_before>""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): return f"{self._rank} of {self._suit}" @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """ <commit_msg>Switch to pre-python 3.6 string formatting for Codeship<commit_after>
""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): # return f"{self.rank} of {self.suit}" return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): return f"{self._rank} of {self._suit}" @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """ Switch to pre-python 3.6 string formatting for Codeship""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): # return f"{self.rank} of {self.suit}" return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
<commit_before>""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): return f"{self._rank} of {self._suit}" @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """ <commit_msg>Switch to pre-python 3.6 string formatting for Codeship<commit_after>""" Created on Dec 04, 2016 @author: john papa Copyright 2016 John Papa. All rights reserved. This work is licensed under the MIT License. """ from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): # return f"{self.rank} of {self.suit}" return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
commit: 58daf6f2225cdf52079072eee47f23a6d188cfa9
old_file / new_file: resync/resource_set.py
lang: Python | license: apache-2.0
repos: lindareijnhoudt/resync,resync/resync,lindareijnhoudt/resync,dans-er/resync,dans-er/resync
subject: Change comment to indicate choice of alphanum order by uri
message: Change comment to indicate choice of alphanum order by uri

old_contents:

```python
"""A set of Resource objects used for Capability List Indexes
and ResourceSync Description documents.

FIXME - what should the ordering be?
"""

class ResourceSet(dict):
    """Implementation of class to store resources in Capability List
    Indexes and ResourceSync Description documents.

    Key properties of this class are:
    - has add(resource) method
    - is iterable and results given in alphanumeric order by resource.uri
    """

    def __iter__(self):
        """Iterator over all the resources in this resource_list"""
        self._iter_next_list = sorted(self.keys())
        self._iter_next_list.reverse()
        return(iter(self._iter_next, None))

    def _iter_next(self):
        if (len(self._iter_next_list)>0):
            return(self[self._iter_next_list.pop()])
        else:
            return(None)

    def add(self, resource, replace=False):
        """Add just a single resource"""
        uri = resource.uri
        if (uri in self and not replace):
            raise ResourceSetDupeError("Attempt to add resource already in this set")
        self[uri]=resource

class ResourceSetDupeError(Exception):
    pass
```

new_contents: identical to old_contents except for the module docstring, which becomes:

```python
"""A set of Resource objects used for Capability List Indexes
and ResourceSync Description documents.

Ordinging is currently alphanumeric (using sorted(..)) on the
uri which is the key.
"""
```
"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. FIXME - what should the ordering be? """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass Change comment to indicate choice of alphanum order by uri
"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. Ordinging is currently alphanumeric (using sorted(..)) on the uri which is the key. """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass
<commit_before>"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. FIXME - what should the ordering be? """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass <commit_msg>Change comment to indicate choice of alphanum order by uri<commit_after>
"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. Ordinging is currently alphanumeric (using sorted(..)) on the uri which is the key. """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass
"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. FIXME - what should the ordering be? """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass Change comment to indicate choice of alphanum order by uri"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. Ordinging is currently alphanumeric (using sorted(..)) on the uri which is the key. """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass
<commit_before>"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. FIXME - what should the ordering be? """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass <commit_msg>Change comment to indicate choice of alphanum order by uri<commit_after>"""A set of Resource objects used for Capability List Indexes and ResourceSync Description documents. Ordinging is currently alphanumeric (using sorted(..)) on the uri which is the key. """ class ResourceSet(dict): """Implementation of class to store resources in Capability List Indexes and ResourceSync Description documents. Key properties of this class are: - has add(resource) method - is iterable and results given in alphanumeric order by resource.uri """ def __iter__(self): """Iterator over all the resources in this resource_list""" self._iter_next_list = sorted(self.keys()) self._iter_next_list.reverse() return(iter(self._iter_next, None)) def _iter_next(self): if (len(self._iter_next_list)>0): return(self[self._iter_next_list.pop()]) else: return(None) def add(self, resource, replace=False): """Add just a single resource""" uri = resource.uri if (uri in self and not replace): raise ResourceSetDupeError("Attempt to add resource already in this set") self[uri]=resource class ResourceSetDupeError(Exception): pass
commit: d2ad097e08b8c5e9d318968f0a6f859f03f7c07a
old_file / new_file: mycli/packages/special/dbcommands.py
lang: Python | license: bsd-3-clause
repos: mdsrosa/mycli,jinstrive/mycli,j-bennet/mycli,martijnengler/mycli,martijnengler/mycli,mdsrosa/mycli,danieljwest/mycli,j-bennet/mycli,jinstrive/mycli,shoma/mycli,danieljwest/mycli,shoma/mycli
subject: Change \dt syntax to add an optional table name.
message: Change \dt syntax to add an optional table name.

old_contents:

```python
import logging

from .main import special_command, RAW_QUERY, PARSED_QUERY

log = logging.getLogger(__name__)


@special_command('\\dt', '\\dt', 'List or describe tables.',
                 arg_type=PARSED_QUERY, case_sensitive=True)
def list_tables(cur, arg=None, arg_type=PARSED_QUERY):
    if arg:
        query = 'SHOW FIELDS FROM {0}'.format(arg)
    else:
        query = 'SHOW TABLES'
    log.debug(query)
    cur.execute(query)
    if cur.description:
        headers = [x[0] for x in cur.description]
        return [(None, cur, headers, '')]
    else:
        return [(None, None, None, '')]


@special_command('\\l', '\\l', 'List databases.', arg_type=RAW_QUERY,
                 case_sensitive=True)
def list_databases(cur, **_):
    query = 'SHOW DATABASES'
    log.debug(query)
    cur.execute(query)
    if cur.description:
        headers = [x[0] for x in cur.description]
        return [(None, cur, headers, '')]
    else:
        return [(None, None, None, '')]
```

new_contents: identical to old_contents except for the `\dt` decorator, whose usage string gains an optional table argument:

```python
@special_command('\\dt', '\\dt [table]', 'List or describe tables.',
                 arg_type=PARSED_QUERY, case_sensitive=True)
```
<commit_before>import logging from .main import special_command, RAW_QUERY, PARSED_QUERY log = logging.getLogger(__name__) @special_command('\\dt', '\\dt', 'List or describe tables.', arg_type=PARSED_QUERY, case_sensitive=True) def list_tables(cur, arg=None, arg_type=PARSED_QUERY): if arg: query = 'SHOW FIELDS FROM {0}'.format(arg) else: query = 'SHOW TABLES' log.debug(query) cur.execute(query) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, '')] else: return [(None, None, None, '')] @special_command('\\l', '\\l', 'List databases.', arg_type=RAW_QUERY, case_sensitive=True) def list_databases(cur, **_): query = 'SHOW DATABASES' log.debug(query) cur.execute(query) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, '')] else: return [(None, None, None, '')] <commit_msg>Change \dt syntax to add an optional table name.<commit_after>import logging from .main import special_command, RAW_QUERY, PARSED_QUERY log = logging.getLogger(__name__) @special_command('\\dt', '\\dt [table]', 'List or describe tables.', arg_type=PARSED_QUERY, case_sensitive=True) def list_tables(cur, arg=None, arg_type=PARSED_QUERY): if arg: query = 'SHOW FIELDS FROM {0}'.format(arg) else: query = 'SHOW TABLES' log.debug(query) cur.execute(query) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, '')] else: return [(None, None, None, '')] @special_command('\\l', '\\l', 'List databases.', arg_type=RAW_QUERY, case_sensitive=True) def list_databases(cur, **_): query = 'SHOW DATABASES' log.debug(query) cur.execute(query) if cur.description: headers = [x[0] for x in cur.description] return [(None, cur, headers, '')] else: return [(None, None, None, '')]
b6096dbe06f636a462f2c1ff85470599754f613f
app/main/views/sub_navigation_dictionaries.py
app/main/views/sub_navigation_dictionaries.py
def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Performance", "link": "https://www.gov.uk/performance/govuk-notify", "external_link": True, }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ]
def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ]
Remove performance link from features nav
Remove performance link from features nav The features nav is supposed to navigate you between pages in the app. It’s very unexpected to have it open an external link. Performance isn’t strictly a part of Support, but it’s worked having it there for long enough that it’s probably not a bother.
Python
mit
gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Performance", "link": "https://www.gov.uk/performance/govuk-notify", "external_link": True, }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ] Remove performance link from features nav The features nav is supposed to navigate you between pages in the app. It’s very unexpected to have it open an external link. Performance isn’t strictly a part of Support, but it’s worked having it there for long enough that it’s probably not a bother.
def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ]
<commit_before>def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Performance", "link": "https://www.gov.uk/performance/govuk-notify", "external_link": True, }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ] <commit_msg>Remove performance link from features nav The features nav is supposed to navigate you between pages in the app. It’s very unexpected to have it open an external link. Performance isn’t strictly a part of Support, but it’s worked having it there for long enough that it’s probably not a bother.<commit_after>
def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ]
def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Performance", "link": "https://www.gov.uk/performance/govuk-notify", "external_link": True, }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ] Remove performance link from features nav The features nav is supposed to navigate you between pages in the app. It’s very unexpected to have it open an external link. Performance isn’t strictly a part of Support, but it’s worked having it there for long enough that it’s probably not a bother.def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ]
<commit_before>def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Performance", "link": "https://www.gov.uk/performance/govuk-notify", "external_link": True, }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ] <commit_msg>Remove performance link from features nav The features nav is supposed to navigate you between pages in the app. It’s very unexpected to have it open an external link. Performance isn’t strictly a part of Support, but it’s worked having it there for long enough that it’s probably not a bother.<commit_after>def features_nav(): return [ { "name": "Features", "link": "main.features", }, { "name": "Roadmap", "link": "main.roadmap", }, { "name": "Security", "link": "main.security", }, { "name": "Terms of use", "link": "main.terms", }, { "name": "Using Notify", "link": "main.using_notify", }, ]
d565786278eaf32761957dd1e064a5d549ef3ab4
praw/models/reddit/mixins/savable.py
praw/models/reddit/mixins/savable.py
"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: The category to save to (Default: None). """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: (Gold) The category to save to (Default: None). If your user does not have gold this value is ignored by Reddit. """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
Clarify that category is a gold feature for saving an item
Clarify that category is a gold feature for saving an item
Python
bsd-2-clause
13steinj/praw,RGood/praw,RGood/praw,darthkedrik/praw,darthkedrik/praw,leviroth/praw,gschizas/praw,leviroth/praw,gschizas/praw,praw-dev/praw,nmtake/praw,praw-dev/praw,nmtake/praw,13steinj/praw
"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: The category to save to (Default: None). """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname}) Clarify that category is a gold feature for saving an item
"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: (Gold) The category to save to (Default: None). If your user does not have gold this value is ignored by Reddit. """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
<commit_before>"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: The category to save to (Default: None). """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname}) <commit_msg>Clarify that category is a gold feature for saving an item<commit_after>
"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: (Gold) The category to save to (Default: None). If your user does not have gold this value is ignored by Reddit. """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: The category to save to (Default: None). """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname}) Clarify that category is a gold feature for saving an item"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: (Gold) The category to save to (Default: None). If your user does not have gold this value is ignored by Reddit. """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
<commit_before>"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: The category to save to (Default: None). """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname}) <commit_msg>Clarify that category is a gold feature for saving an item<commit_after>"""Provide the SavableMixin class.""" from ....const import API_PATH class SavableMixin(object): """Interface for RedditBase classes that can be saved.""" def save(self, category=None): """Save the object. :param category: (Gold) The category to save to (Default: None). If your user does not have gold this value is ignored by Reddit. """ self._reddit.post(API_PATH['save'], data={'category': category, 'id': self.fullname}) def unsave(self): """Unsave the object.""" self._reddit.post(API_PATH['unsave'], data={'id': self.fullname})
55dfcce3d2c42433249f401ff5021820c341a691
entity_networks/activations.py
entity_networks/activations.py
from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=tf.constant_initializer(1), name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg
from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=None, name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg
Remove default initializer from prelu
Remove default initializer from prelu
Python
mit
mikalyoung/recurrent-entity-networks,jimfleming/recurrent-entity-networks,mikalyoung/recurrent-entity-networks,jimfleming/recurrent-entity-networks
from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=tf.constant_initializer(1), name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg Remove default initializer from prelu
from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=None, name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg
<commit_before>from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=tf.constant_initializer(1), name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg <commit_msg>Remove default initializer from prelu<commit_after>
from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=None, name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg
from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=tf.constant_initializer(1), name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg Remove default initializer from prelufrom __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=None, name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg
<commit_before>from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=tf.constant_initializer(1), name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg <commit_msg>Remove default initializer from prelu<commit_after>from __future__ import absolute_import from __future__ import print_function from __future__ import division import tensorflow as tf def prelu(features, initializer=None, name=None): """ Implementation of [Parametric ReLU](https://arxiv.org/abs/1502.01852) borrowed from Keras. """ with tf.variable_scope(name or 'PReLU'): alpha = tf.get_variable('alpha', shape=features.get_shape().as_list()[1:], initializer=initializer) pos = tf.nn.relu(features) neg = alpha * (features - tf.abs(features)) * 0.5 return pos + neg
8a8d4905c169b9a1060f1283d0286c433af24f43
word2gauss/words.py
word2gauss/words.py
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize(doc, remove_oov=False, return_ids=True) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
Change the interface on tokenize in vocabulary
Change the interface on tokenize in vocabulary
Python
mit
seomoz/word2gauss,seomoz/word2gauss
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize(doc, remove_oov=False, return_ids=True) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size)) Change the interface on tokenize in vocabulary
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
<commit_before> from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize(doc, remove_oov=False, return_ids=True) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size)) <commit_msg>Change the interface on tokenize in vocabulary<commit_after>
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize(doc, remove_oov=False, return_ids=True) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size)) Change the interface on tokenize in vocabulary from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
<commit_before> from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize(doc, remove_oov=False, return_ids=True) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size)) <commit_msg>Change the interface on tokenize in vocabulary<commit_after> from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to draw for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
11220e1df49a2fb7dfd4032bb03f595188d8178f
buildPy2app.py
buildPy2app.py
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'LSMinimumSystemVersion':'10.11.0', 'NSHumanReadableCopyright': '@ 2018 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
Set 10.11.0 as minimum macOS version in the .app bundle
Set 10.11.0 as minimum macOS version in the .app bundle
Python
apache-2.0
NeverDecaf/syncplay,alby128/syncplay,NeverDecaf/syncplay,Syncplay/syncplay,alby128/syncplay,Syncplay/syncplay
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], ) Set 10.11.0 as minimum macOS version in the .app bundle
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'LSMinimumSystemVersion':'10.11.0', 'NSHumanReadableCopyright': '@ 2018 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
<commit_before>""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], ) <commit_msg>Set 10.11.0 as minimum macOS version in the .app bundle<commit_after>
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'LSMinimumSystemVersion':'10.11.0', 'NSHumanReadableCopyright': '@ 2018 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], ) Set 10.11.0 as minimum macOS version in the .app bundle""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'LSMinimumSystemVersion':'10.11.0', 'NSHumanReadableCopyright': '@ 2018 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
<commit_before>""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], ) <commit_msg>Set 10.11.0 as minimum macOS version in the .app bundle<commit_after>""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'LSMinimumSystemVersion':'10.11.0', 'NSHumanReadableCopyright': '@ 2018 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
85df39bb82907dfec52735be3096c926c6b8bd54
src/waldur_mastermind/marketplace/migrations/0034_change_offering_geo_data.py
src/waldur_mastermind/marketplace/migrations/0034_change_offering_geo_data.py
# Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if not offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ]
# Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ]
Fix typo in database migration script.
Fix typo in database migration script.
Python
mit
opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
# Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if not offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ] Fix typo in database migration script.
# Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ]
<commit_before># Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if not offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ] <commit_msg>Fix typo in database migration script.<commit_after>
# Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ]
# Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if not offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ] Fix typo in database migration script.# Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ]
<commit_before># Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if not offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ] <commit_msg>Fix typo in database migration script.<commit_after># Generated by Django 2.2.13 on 2020-11-23 13:44 from django.db import migrations, models def fill_new_geo_fields(apps, schema_editor): Offering = apps.get_model('marketplace', 'Offering') for offering in Offering.objects.all(): if offering.geolocations: geolocation = offering.geolocations[0] offering.latitude = geolocation['latitude'] offering.longitude = geolocation['longitude'] offering.save() class Migration(migrations.Migration): dependencies = [ ('marketplace', '0033_mandatory_offering_type'), ] operations = [ migrations.AddField( model_name='offering', name='latitude', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='offering', name='longitude', field=models.FloatField(blank=True, null=True), ), migrations.RunPython(fill_new_geo_fields), migrations.RemoveField(model_name='offering', name='geolocations',), ]
8111a7e32ec80a35f16c081664946292111485fe
scripts/create_shop.py
scripts/create_shop.py
#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id',) @click.argument('title') @click.argument('email_config_id',) @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute()
#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id') @click.argument('title') @click.argument('email_config_id') @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute()
Remove unintentional commas from argument list
Remove unintentional commas from argument list
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id',) @click.argument('title') @click.argument('email_config_id',) @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute() Remove unintentional commas from argument list
#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id') @click.argument('title') @click.argument('email_config_id') @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute()
<commit_before>#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id',) @click.argument('title') @click.argument('email_config_id',) @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute() <commit_msg>Remove unintentional commas from argument list<commit_after>
#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id') @click.argument('title') @click.argument('email_config_id') @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute()
#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id',) @click.argument('title') @click.argument('email_config_id',) @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute() Remove unintentional commas from argument list#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id') @click.argument('title') @click.argument('email_config_id') @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute()
<commit_before>#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id',) @click.argument('title') @click.argument('email_config_id',) @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute() <commit_msg>Remove unintentional commas from argument list<commit_after>#!/usr/bin/env python """Create a shop with article and order sequences. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import click from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from byceps.util.system import get_config_filename_from_env_or_exit from _util import app_context @click.command() @click.argument('shop_id') @click.argument('title') @click.argument('email_config_id') @click.argument('article_prefix') @click.argument('order_prefix') def execute(shop_id, title, email_config_id, article_prefix, order_prefix): shop = shop_service.create_shop(shop_id, title, email_config_id) sequence_service.create_article_number_sequence(shop.id, article_prefix) sequence_service.create_order_number_sequence(shop.id, order_prefix) click.secho('Done.', fg='green') if __name__ == '__main__': config_filename = get_config_filename_from_env_or_exit() with app_context(config_filename): execute()
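A note on the change above: in Python, a trailing comma inside a call's argument list is legal and ignored by the parser, so this commit is purely cosmetic; the decorated command behaved identically before and after. A minimal, dependency-free sketch of the distinction:

def argument(name):
    return name

# Trailing comma inside a call: ignored, same call either way.
assert argument('shop_id',) == argument('shop_id')

# Trailing comma inside bare parentheses: builds a one-element tuple.
assert ('shop_id',) == tuple(['shop_id'])
assert ('shop_id') == 'shop_id'  # no comma, just a parenthesized string

print("trailing commas in calls are cosmetic")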
8348f46fb78b55c5d2bcd6401f4041e8890072db
gviewer/keys/vim.py
gviewer/keys/vim.py
from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl b", "page up")] )
from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl d", "page down"), ("ctrl b", "page up"), ("ctrl u", "page up")] )
Add ctrl+d/ctrl+u for page down and page up
Add ctrl+d/ctrl+u for page down and page up
Python
mit
chhsiao90/gviewer
from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl b", "page up")] ) Add ctrl+d/ctrl+u for page down and page up
from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl d", "page down"), ("ctrl b", "page up"), ("ctrl u", "page up")] )
<commit_before>from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl b", "page up")] ) <commit_msg>Add ctrl+d/ctrl+u for page down and page up<commit_after>
from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl d", "page down"), ("ctrl b", "page up"), ("ctrl u", "page up")] )
from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl b", "page up")] ) Add ctrl+d/ctrl+u for page down and page upfrom collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl d", "page down"), ("ctrl b", "page up"), ("ctrl u", "page up")] )
<commit_before>from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl b", "page up")] ) <commit_msg>Add ctrl+d/ctrl+u for page down and page up<commit_after>from collections import OrderedDict keys = OrderedDict([ ("j", "down"), ("k", "up"), ("ctrl f", "page down"), ("ctrl d", "page down"), ("ctrl b", "page up"), ("ctrl u", "page up")] )
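The new bindings simply alias a second key to each existing action. A hypothetical dispatch helper (not gviewer's actual code) showing how such a keymap is typically consumed:

from collections import OrderedDict

keys = OrderedDict([
    ("j", "down"),
    ("k", "up"),
    ("ctrl f", "page down"),
    ("ctrl d", "page down"),  # alias added by this commit
    ("ctrl b", "page up"),
    ("ctrl u", "page up"),    # alias added by this commit
])

def translate(pressed):
    # Unmapped keys pass through unchanged for downstream handlers.
    return keys.get(pressed, pressed)

assert translate("ctrl d") == translate("ctrl f") == "page down"
print(translate("ctrl u"))  # -> page up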
e2e9a7a0339ae269a239156972595d6ff590cebe
src/yunohost/data_migrations/0009_migrate_to_apps_json.py
src/yunohost/data_migrations/0009_migrate_to_apps_json.py
from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove official.json list app_removelist(name="yunohost") # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json")
from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove all the deprecated lists lists_to_remove = [ "https://app.yunohost.org/official.json", "https://app.yunohost.org/community.json", "https://labriqueinter.net/apps/labriqueinternet.json" ] appslists = _read_appslist_list() for appslist, infos in appslists.items(): if infos["url"] in lists_to_remove: app_removelist(name=appslist) # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json")
Remove all deprecated lists, not just 'yunohost'
Remove all deprecated lists, not just 'yunohost'
Python
agpl-3.0
YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost
from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove official.json list app_removelist(name="yunohost") # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json") Remove all deprecated lists, not just 'yunohost'
from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove all the deprecated lists lists_to_remove = [ "https://app.yunohost.org/official.json", "https://app.yunohost.org/community.json", "https://labriqueinter.net/apps/labriqueinternet.json" ] appslists = _read_appslist_list() for appslist, infos in appslists.items(): if infos["url"] in lists_to_remove: app_removelist(name=appslist) # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json")
<commit_before>from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove official.json list app_removelist(name="yunohost") # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json") <commit_msg>Remove all deprecated lists, not just 'yunohost'<commit_after>
from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove all the deprecated lists lists_to_remove = [ "https://app.yunohost.org/official.json", "https://app.yunohost.org/community.json", "https://labriqueinter.net/apps/labriqueinternet.json" ] appslists = _read_appslist_list() for appslist, infos in appslists.items(): if infos["url"] in lists_to_remove: app_removelist(name=appslist) # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json")
from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove official.json list app_removelist(name="yunohost") # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json") Remove all deprecated lists, not just 'yunohost'from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove all the deprecated lists lists_to_remove = [ "https://app.yunohost.org/official.json", "https://app.yunohost.org/community.json", "https://labriqueinter.net/apps/labriqueinternet.json" ] appslists = _read_appslist_list() for appslist, infos in appslists.items(): if infos["url"] in lists_to_remove: app_removelist(name=appslist) # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json")
<commit_before>from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove official.json list app_removelist(name="yunohost") # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json") <commit_msg>Remove all deprecated lists, not just 'yunohost'<commit_after>from moulinette.utils.log import getActionLogger from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list from yunohost.tools import Migration logger = getActionLogger('yunohost.migration') class MyMigration(Migration): "Migrate from official.json to apps.json" def migrate(self): # Remove all the deprecated lists lists_to_remove = [ "https://app.yunohost.org/official.json", "https://app.yunohost.org/community.json", "https://labriqueinter.net/apps/labriqueinternet.json" ] appslists = _read_appslist_list() for appslist, infos in appslists.items(): if infos["url"] in lists_to_remove: app_removelist(name=appslist) # Replace by apps.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/apps.json") def backward(self): # Remove apps.json list app_removelist(name="yunohost") # Replace by official.json list app_fetchlist(name="yunohost", url="https://app.yunohost.org/official.json")
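The heart of the migration is a deny-list filter over a stored mapping. A self-contained sketch with made-up data; the dict shape is an assumption about what _read_appslist_list() returns, not YunoHost's verified format:

lists_to_remove = {
    "https://app.yunohost.org/official.json",
    "https://app.yunohost.org/community.json",
}

appslists = {  # hypothetical stand-in for the stored appslists
    "yunohost": {"url": "https://app.yunohost.org/official.json"},
    "community": {"url": "https://app.yunohost.org/community.json"},
    "custom": {"url": "https://example.org/custom.json"},
}

removed = [name for name, infos in appslists.items()
           if infos["url"] in lists_to_remove]
print(removed)  # -> ['yunohost', 'community'] ('custom' survives)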
8b5973b5581fb6da27891f8c2256886c1dc7e8a8
server/src/db_layer.py
server/src/db_layer.py
from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()]
from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class User(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "users" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()]
Add users to the list of models.
Add users to the list of models.
Python
mit
Opportunity-Hack-2015-Arizona/Team1,Opportunity-Hack-2015-Arizona/Team1,Opportunity-Hack-2015-Arizona/Team1
from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()] Add users to the list of models.
from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class User(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "users" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()]
<commit_before>from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()] <commit_msg>Add users to the list of models.<commit_after>
from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class User(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "users" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()]
from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()] Add users to the list of models.from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class User(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "users" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()]
<commit_before>from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()] <commit_msg>Add users to the list of models.<commit_after>from pymongo import MongoClient # Magic decorator for defining constants def constant(f): def fset(self, value): raise TypeError def fget(self): return f() return property(fget, fset) class Model: def __init__(self): pass @staticmethod @constant def COLLECTION_NAME(): return Model.__name__ class Post(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "posts" class User(Model): def __init__(self): Model.__init__(self) @staticmethod @constant def COLLECTION_NAME(): return "users" class GideonDatabaseClient: @staticmethod @constant def DATABASE_NAME(): return "test-database" def __init__(self): self.client = MongoClient("mongodb://localhost:27017/") self.db = self.client[GideonDatabaseClient.DATABASE_NAME()] def get_collection(self, model_cls): return self.db[model_cls.COLLECTION_NAME()]
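The constant decorator above is essentially a read-only property. A plain property with no setter gives the same guarantee and raises on assignment; here is a minimal instance-level sketch of the idea:

class Config:
    @property
    def DATABASE_NAME(self):
        return "test-database"

cfg = Config()
print(cfg.DATABASE_NAME)          # -> test-database
try:
    cfg.DATABASE_NAME = "other"   # no setter defined, so this fails
except AttributeError as exc:
    print("read-only:", exc)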
8ee2bed47efadf8bedf086295c0f67850fad6876
pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py
pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store_const', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo
Fix action in plugin module
Fix action in plugin module
Python
mit
s0undt3ch/cookiecutter-pytest-plugin,luzfcb/cookiecutter-pytest-plugin,pytest-dev/cookiecutter-pytest-plugin
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store_const', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo Fix action in plugin module
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo
<commit_before># -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store_const', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo <commit_msg>Fix action in plugin module<commit_after>
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store_const', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo Fix action in plugin module# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo
<commit_before># -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store_const', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo <commit_msg>Fix action in plugin module<commit_after># -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo
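The fix matters because store_const stores a fixed const and consumes no value from the command line, while store takes the following token, so the old action could never capture a user-supplied value for --foo. A standalone argparse demonstration of the two actions (pytest's addoption forwards options to similar machinery):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--foo', action='store', dest='foo')                       # consumes a value
parser.add_argument('--debug', action='store_const', dest='debug', const=True) # takes none

args = parser.parse_args(['--foo', 'bar', '--debug'])
print(args.foo, args.debug)  # -> bar True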
849fbdf724528df99f2ac53d389274f7c2631f11
invitation/admin.py
invitation/admin.py
from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite this user" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode)
from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite selected invitation requests" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode)
Improve the invite_user action name.
Improve the invite_user action name.
Python
bsd-3-clause
adieu/django-invitation
from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite this user" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode) Improve the invite_user action name.
from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite selected invitation requests" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode)
<commit_before>from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite this user" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode) <commit_msg>Improve the invite_user action name.<commit_after>
from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite selected invitation requests" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode)
from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite this user" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode) Improve the invite_user action name.from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite selected invitation requests" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode)
<commit_before>from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite this user" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode) <commit_msg>Improve the invite_user action name.<commit_after>from django.contrib import admin from invitation.models import InvitationKey, InvitationUser, InvitationRequest, InvitationCode class InvitationKeyAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'from_user', 'date_invited', 'key_expired') class InvitationUserAdmin(admin.ModelAdmin): list_display = ('inviter', 'invitations_remaining') def invite_user(modeladmin, request, queryset): for invitation_request in queryset.all(): invitation = InvitationKey.objects.create_invitation(request.user) invitation.send_to(invitation_request.email) invitation_request.invited = True invitation_request.save() invite_user.short_description = "Invite selected invitation requests" class InvitationRequestAdmin(admin.ModelAdmin): list_display = ('email', 'invited') actions = [invite_user] admin.site.register(InvitationKey, InvitationKeyAdmin) admin.site.register(InvitationUser, InvitationUserAdmin) admin.site.register(InvitationRequest, InvitationRequestAdmin) admin.site.register(InvitationCode)
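Django builds each entry in the admin's "Action" dropdown from the callable's short_description attribute, falling back (roughly) to the function name with underscores replaced by spaces, which is why the rename is a one-line change. The mechanism is ordinary attribute assignment and can be shown without a Django project:

def invite_user(modeladmin, request, queryset):
    pass  # stand-in body; the real action invites and flags each request

invite_user.short_description = "Invite selected invitation requests"

# Roughly what ModelAdmin does when rendering the dropdown label:
label = getattr(invite_user, 'short_description',
                invite_user.__name__.replace('_', ' '))
print(label)  # -> Invite selected invitation requests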
ba23baaee867ed79762fb3e3ac10af47d028d9ed
ergae/app.py
ergae/app.py
# ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import from flask import Flask from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod)
# ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import import os from flask import Flask from .config import get_config, set_config from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) app.secret_key = get_config('secret_key') if app.secret_key is None: app.secret_key = os.urandom(24) set_config('secret_key', app.secret_key)
Create random HMAC secret and save it
Create random HMAC secret and save it
Python
agpl-3.0
earthreader/ergae
# ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import from flask import Flask from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) Create random HMAC secret and save it
# ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import import os from flask import Flask from .config import get_config, set_config from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) app.secret_key = get_config('secret_key') if app.secret_key is None: app.secret_key = os.urandom(24) set_config('secret_key', app.secret_key)
<commit_before># ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import from flask import Flask from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) <commit_msg>Create random HMAC secret and save it<commit_after>
# ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import import os from flask import Flask from .config import get_config, set_config from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) app.secret_key = get_config('secret_key') if app.secret_key is None: app.secret_key = os.urandom(24) set_config('secret_key', app.secret_key)
# ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import from flask import Flask from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) Create random HMAC secret and save it# ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import import os from flask import Flask from .config import get_config, set_config from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) app.secret_key = get_config('secret_key') if app.secret_key is None: app.secret_key = os.urandom(24) set_config('secret_key', app.secret_key)
<commit_before># ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import from flask import Flask from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) <commit_msg>Create random HMAC secret and save it<commit_after># ergae --- Earth Reader on Google App Engine # Copyright (C) 2014 Hong Minhee # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import import os from flask import Flask from .config import get_config, set_config from .dropbox import mod app = Flask(__name__) app.register_blueprint(mod) app.secret_key = get_config('secret_key') if app.secret_key is None: app.secret_key = os.urandom(24) set_config('secret_key', app.secret_key)
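The commit's pattern is to generate a secret once, then persist and reuse it so Flask session cookies stay valid across restarts. A sketch with an in-memory stand-in (the dict below is hypothetical; the real get_config/set_config presumably persist the value):

import os

_store = {}  # hypothetical stand-in for the persistent config backend

def get_config(key):
    return _store.get(key)

def set_config(key, value):
    _store[key] = value

secret_key = get_config('secret_key')
if secret_key is None:
    secret_key = os.urandom(24)  # 24 random bytes for HMAC session signing
    set_config('secret_key', secret_key)

assert get_config('secret_key') == secret_key
print(len(secret_key))  # -> 24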
82d90487a43e309074e5572b6ac529a707345274
fileutils.py
fileutils.py
##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ from io import FileIO, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_buffered_io_to_file(self, buffered_io, file_path): with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close()
##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ import os from io import FileIO, BufferedReader, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_file_stream_to_file(self, file, to_path): self.copy_buffered_io_to_file(BufferedReader(file), to_path) def copy_buffered_io_to_file(self, buffered_io, file_path): os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True) with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close()
Add wrapped raw file copy function that uses BufferedReader. Also add logic to create the directory if the target directory doesn't exist.
Add wrapped raw file copy function that uses BufferedReader. Also add logic to create the directory if the target directory doesn't exist.
Python
mit
iandmyhand/python-utils
##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ from io import FileIO, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_buffered_io_to_file(self, buffered_io, file_path): with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close() Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.
##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ import os from io import FileIO, BufferedReader, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_file_stream_to_file(self, file, to_path): self.copy_buffered_io_to_file(BufferedReader(file), to_path) def copy_buffered_io_to_file(self, buffered_io, file_path): os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True) with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close()
<commit_before>##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ from io import FileIO, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_buffered_io_to_file(self, buffered_io, file_path): with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close() <commit_msg>Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.<commit_after>
##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ import os from io import FileIO, BufferedReader, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_file_stream_to_file(self, file, to_path): self.copy_buffered_io_to_file(BufferedReader(file), to_path) def copy_buffered_io_to_file(self, buffered_io, file_path): os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True) with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close()
##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ from io import FileIO, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_buffered_io_to_file(self, buffered_io, file_path): with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close() Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ import os from io import FileIO, BufferedReader, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_file_stream_to_file(self, file, to_path): self.copy_buffered_io_to_file(BufferedReader(file), to_path) def copy_buffered_io_to_file(self, buffered_io, file_path): os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True) with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close()
<commit_before>##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ from io import FileIO, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_buffered_io_to_file(self, buffered_io, file_path): with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close() <commit_msg>Add wrapped raw file copy function that use BufferedReader. And add logic to make directory if target directory isn't exist.<commit_after>##-*- coding: utf-8 -*- #!/usr/bin/python """ Utilities related to Files. """ import os from io import FileIO, BufferedReader, BufferedWriter __author__ = 'SeomGi, Han' __credits__ = ['SeomGi, Han'] __copyright__ = 'Copyright 2015, Python Utils Project' __license__ = 'MIT' __version__ = '1.0.0' __maintainer__ = 'SeomGi, Han' __email__ = 'iandmyhand@gmail.com' __status__ = 'Production' class FileUtils: def copy_file_stream_to_file(self, file, to_path): self.copy_buffered_io_to_file(BufferedReader(file), to_path) def copy_buffered_io_to_file(self, buffered_io, file_path): os.makedirs(file_path[:file_path.rfind('/') + 1], exist_ok=True) with FileIO(file_path, mode='wb') as raw_output_io: with BufferedWriter(raw_output_io) as writer: while 1: line = buffered_io.readline() if not line: break writer.write(line) buffered_io.close()
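Two details in the new code are worth isolating: file_path[:file_path.rfind('/') + 1] is a hand-rolled os.path.dirname for '/'-separated paths, and exist_ok=True makes makedirs idempotent. A runnable sketch using the portable stdlib helper instead of the slice (paths are illustrative):

import os
import tempfile
from io import BytesIO, BufferedReader

target = os.path.join(tempfile.mkdtemp(), "nested", "dir", "copy.bin")

# Same intent as the slice above, but portable across path separators.
os.makedirs(os.path.dirname(target), exist_ok=True)

reader = BufferedReader(BytesIO(b"line 1\nline 2\n"))
with open(target, "wb") as fh:
    fh.write(reader.read())

print(os.path.getsize(target))  # -> 14 bytes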
a353e2227e9d8f7c5ccdb890fa70d4166751af22
example/wsgi.py
example/wsgi.py
""" WSGI config for test2 project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") application = get_wsgi_application()
Fix an occurrence of E402
Fix an occurrence of E402
Python
bsd-3-clause
diegobz/django-admin-sso,matthiask/django-admin-sso,matthiask/django-admin-sso,diegobz/django-admin-sso
""" WSGI config for test2 project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application) Fix an occurrence of E402
import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") application = get_wsgi_application()
<commit_before>""" WSGI config for test2 project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application) <commit_msg>Fix an occurrence of E402<commit_after>
import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") application = get_wsgi_application()
""" WSGI config for test2 project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application) Fix an occurrence of E402import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") application = get_wsgi_application()
<commit_before>""" WSGI config for test2 project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application) <commit_msg>Fix an occurrence of E402<commit_after>import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test2.settings") application = get_wsgi_application()
0418609dd429a45a327ace191514ce2c4233ea11
tests_django/test_settings.py
tests_django/test_settings.py
""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } }
""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } USE_TZ = True
Set USE_TZ in test settings
Set USE_TZ in test settings
Python
bsd-3-clause
davizucon/ChatterBot,maclogan/VirtualPenPal,Reinaesaya/OUIRL-ChatBot,gunthercox/ChatterBot,Gustavo6046/ChatterBot,vkosuri/ChatterBot,Reinaesaya/OUIRL-ChatBot
""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } Set USE_TZ in test settings
""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } USE_TZ = True
<commit_before>""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } <commit_msg>Set USE_TZ in test settings<commit_after>
""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } USE_TZ = True
""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } Set USE_TZ in test settings""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } USE_TZ = True
<commit_before>""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } <commit_msg>Set USE_TZ in test settings<commit_after>""" Test Django settings """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'fake-key' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'chatterbot.ext.django_chatterbot', 'tests_django', ] CHATTERBOT = { 'name': 'Test Django ChatterBot', 'trainer': 'chatterbot.trainers.ChatterBotCorpusTrainer', 'training_data': [ 'chatterbot.corpus.english.greetings' ], 'initialize': False } ROOT_URLCONF = 'chatterbot.ext.django_chatterbot.urls' MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } USE_TZ = True
4fa22298598add3541baf8ac4b3636eb4c64b9ec
fuzzer/tasks.py
fuzzer/tasks.py
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def drill(binary, input, fuzz_bitmap, exit_on_eof=False): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash()
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def fuzz(binary): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash()
Fix function declaration for fuzz task
Fix function declaration for fuzz task
Python
bsd-2-clause
shellphish/driller
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def drill(binary, input, fuzz_bitmap, exit_on_eof=False): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash() Fix function declaration for fuzz task
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def fuzz(binary): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash()
<commit_before>import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def drill(binary, input, fuzz_bitmap, exit_on_eof=False): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash() <commit_msg>Fix function declaration for fuzz task<commit_after>
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def fuzz(binary): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash()
import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def drill(binary, input, fuzz_bitmap, exit_on_eof=False): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash() Fix function declaration for fuzz taskimport redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def fuzz(binary): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash()
<commit_before>import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def drill(binary, input, fuzz_bitmap, exit_on_eof=False): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash() <commit_msg>Fix function declaration for fuzz task<commit_after>import redis from celery import Celery from .Fuzzer import Fuzzer import os import time import driller.config as config import logging l = logging.getLogger("fuzzer.tasks") backend_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT) app = Celery('fuzzer', broker=config.BROKER_URL, backend=backend_url) @app.task def fuzz(binary): binary_path = os.path.join(config.BINARY_DIR, binary) fuzzer = Fuzzer(binary_path, "tests", config.FUZZER_INSTANCES) try: fuzzer.start() except Fuzzer.EarlyCrash: l.info("binary crashed on dummy testcase, moving on...") return 0 # start the fuzzer and poll for a crash or timeout fuzzer.start() while not fuzzer.found_crash() and not fuzzer.timed_out(): time.sleep(config.CRASH_CHECK_INTERVAL) # make sure to kill the fuzzers when we're done fuzzer.kill() return fuzzer.found_crash()
45bb9872978311774b97d9243358ffe9eaad3389
client/main.py
client/main.py
import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") mic.say("How can I be of service, %s?" % (profile["first_name"])) conversation = Conversation("JASPER", mic, profile) conversation.handleForever()
import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") addendum = "" if 'first_name' in profile: addendum = ", %s" % profile["first_name"] mic.say("How can I be of service%s?" % addendum) conversation = Conversation("JASPER", mic, profile) conversation.handleForever()
Check if first_name is set for profile beforehand
Check if first_name is set for profile beforehand
Python
mit
syardumi/jasper-client,benhoff/jasper-client,tsaitsai/jasper-client,densic/HomeAutomation,sunu/jasper-client,fritz-fritz/jasper-client,jskye/voicehud-jasper,clumsical/hackthehouse-marty,djeraseit/jasper-client,markferry/jasper-client,jasperproject/jasper-client,densic/HomeAutomation,sunu/jasper-client,rowhit/jasper-client,jskye/voicehud-jasper,jasperproject/jasper-client,brad999/nikita-client,tsaitsai/jasper-client,aish9r/jasper-client,bdizen/jasper-client,sanyaade-iot/jasper-client,steppy345/jasper-client,joekinley/jasper-client,rowhit/jasper-client,benhoff/jasper-client,brad999/nikita-client,rahul1193/jasper-client,Siretu/jasper-client,markferry/jasper-client,rahul1193/jasper-client,aish9r/jasper-client,fritz-fritz/jasper-client,auhlig/jasper-client,clumsical/hackthehouse-marty,rab206/self-proj-pi,Siretu/jasper-client,brad999/nikita,Brandon32/jasper-client,skylarker/jasper-client,djeraseit/jasper-client,MaakbareWereld/storyteller,steppy345/jasper-client,zanbel/david,skylarker/jasper-client,sukhoi/jasper-client,brad999/nikita,zanbel/david,ajay-gandhi/jasper-client,auhlig/jasper-client,sanyaade-iot/jasper-client,syardumi/jasper-client,joekinley/jasper-client,ajay-gandhi/jasper-client,Brandon32/jasper-client,sukhoi/jasper-client,rab206/self-proj-pi,assanee/jasper-client,assanee/jasper-client,DarrenRainey/jasper-client,MaakbareWereld/storyteller,DarrenRainey/jasper-client,bdizen/jasper-client
import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") mic.say("How can I be of service, %s?" % (profile["first_name"])) conversation = Conversation("JASPER", mic, profile) conversation.handleForever() Check if first_name is set for profile beforehand
import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") addendum = "" if 'first_name' in profile: addendum = ", %s" % profile["first_name"] mic.say("How can I be of service%s?" % addendum) conversation = Conversation("JASPER", mic, profile) conversation.handleForever()
<commit_before>import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") mic.say("How can I be of service, %s?" % (profile["first_name"])) conversation = Conversation("JASPER", mic, profile) conversation.handleForever() <commit_msg>Check if first_name is set for profile beforehand<commit_after>
import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") addendum = "" if 'first_name' in profile: addendum = ", %s" % profile["first_name"] mic.say("How can I be of service%s?" % addendum) conversation = Conversation("JASPER", mic, profile) conversation.handleForever()
import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") mic.say("How can I be of service, %s?" % (profile["first_name"])) conversation = Conversation("JASPER", mic, profile) conversation.handleForever() Check if first_name is set for profile beforehandimport yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") addendum = "" if 'first_name' in profile: addendum = ", %s" % profile["first_name"] mic.say("How can I be of service%s?" % addendum) conversation = Conversation("JASPER", mic, profile) conversation.handleForever()
<commit_before>import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") mic.say("How can I be of service, %s?" % (profile["first_name"])) conversation = Conversation("JASPER", mic, profile) conversation.handleForever() <commit_msg>Check if first_name is set for profile beforehand<commit_after>import yaml import sys from conversation import Conversation def isLocal(): return len(sys.argv) > 1 and sys.argv[1] == "--local" if isLocal(): from local_mic import Mic else: from mic import Mic if __name__ == "__main__": print "===========================================================" print " JASPER The Talking Computer " print " Copyright 2013 Shubhro Saha & Charlie Marsh " print "===========================================================" profile = yaml.safe_load(open("profile.yml", "r")) mic = Mic("languagemodel.lm", "dictionary.dic", "languagemodel_persona.lm", "dictionary_persona.dic") addendum = "" if 'first_name' in profile: addendum = ", %s" % profile["first_name"] mic.say("How can I be of service%s?" % addendum) conversation = Conversation("JASPER", mic, profile) conversation.handleForever()
d0f425d215f0d1c4f57a3517ad3e4c15f2b35e86
tests/travis_test/TravisBuildTest.py
tests/travis_test/TravisBuildTest.py
import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def main(): ut.main() if __name__ == '__main__': main()
import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def test_failing(self): self.assertEqual(1, 2) def main(): ut.main() if __name__ == '__main__': main()
Add failing test to dev branch to test if master badge stays green
Add failing test to dev branch to test if master badge stays green
Python
mit
PatrikValkovic/grammpy
import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def main(): ut.main() if __name__ == '__main__': main() Add failing test to dev branch to test if master badge stays green
import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def test_failing(self): self.assertEqual(1, 2) def main(): ut.main() if __name__ == '__main__': main()
<commit_before>import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def main(): ut.main() if __name__ == '__main__': main() <commit_msg>Add failing test to dev branch to test if master badge stays green<commit_after>
import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def test_failing(self): self.assertEqual(1, 2) def main(): ut.main() if __name__ == '__main__': main()
import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def main(): ut.main() if __name__ == '__main__': main() Add failing test to dev branch to test if master badge stays greenimport unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def test_failing(self): self.assertEqual(1, 2) def main(): ut.main() if __name__ == '__main__': main()
<commit_before>import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def main(): ut.main() if __name__ == '__main__': main() <commit_msg>Add failing test to dev branch to test if master badge stays green<commit_after>import unittest as ut class TravisBuildTest(ut.TestCase): def test_success(self): self.assertEqual(1, 1, "1 is not equal to 1?!") def test_failing(self): self.assertEqual(1, 2) def main(): ut.main() if __name__ == '__main__': main()
5dbdac674692b67f8f08627453b145c4d24ac32f
tests/integration/conftest.py
tests/integration/conftest.py
# coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root = metafunc.config.option.root if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root" in metafunc.fixturenames and root is not None: metafunc.parametrize("root", [root]) @pytest.fixture() def kiteconnect(api_key, access_token, root): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root or None)
# coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root-url", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root_url = metafunc.config.option.root_url if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root_url" in metafunc.fixturenames and root_url is not None: metafunc.parametrize("root_url", [root_url]) @pytest.fixture() def kiteconnect(api_key, access_token, root_url): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root_url or None)
Rename cmd flag root to root-url for integration tests
Rename cmd flag root to root-url for integration tests
Python
mit
rainmattertech/pykiteconnect
# coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root = metafunc.config.option.root if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root" in metafunc.fixturenames and root is not None: metafunc.parametrize("root", [root]) @pytest.fixture() def kiteconnect(api_key, access_token, root): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root or None) Rename cmd flag root to root-url for integration tests
# coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root-url", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root_url = metafunc.config.option.root_url if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root_url" in metafunc.fixturenames and root_url is not None: metafunc.parametrize("root_url", [root_url]) @pytest.fixture() def kiteconnect(api_key, access_token, root_url): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root_url or None)
<commit_before># coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root = metafunc.config.option.root if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root" in metafunc.fixturenames and root is not None: metafunc.parametrize("root", [root]) @pytest.fixture() def kiteconnect(api_key, access_token, root): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root or None) <commit_msg>Rename cmd flag root to root-url for integration tests<commit_after>
# coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root-url", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root_url = metafunc.config.option.root_url if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root_url" in metafunc.fixturenames and root_url is not None: metafunc.parametrize("root_url", [root_url]) @pytest.fixture() def kiteconnect(api_key, access_token, root_url): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root_url or None)
# coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root = metafunc.config.option.root if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root" in metafunc.fixturenames and root is not None: metafunc.parametrize("root", [root]) @pytest.fixture() def kiteconnect(api_key, access_token, root): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root or None) Rename cmd flag root to root-url for integration tests# coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root-url", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root_url = metafunc.config.option.root_url if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root_url" in metafunc.fixturenames and root_url is not None: metafunc.parametrize("root_url", [root_url]) @pytest.fixture() def kiteconnect(api_key, access_token, root_url): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root_url or None)
<commit_before># coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root = metafunc.config.option.root if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root" in metafunc.fixturenames and root is not None: metafunc.parametrize("root", [root]) @pytest.fixture() def kiteconnect(api_key, access_token, root): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root or None) <commit_msg>Rename cmd flag root to root-url for integration tests<commit_after># coding: utf-8 """Pytest config.""" import os import sys import pytest from kiteconnect import KiteConnect sys.path.append(os.path.join(os.path.dirname(__file__), '../helpers')) def pytest_addoption(parser): """Add available args.""" parser.addoption("--api-key", action="store", default="Api key") parser.addoption("--access-token", action="store", default="Access token") parser.addoption("--root-url", action="store", default="") def pytest_generate_tests(metafunc): """This is called for every test. Only get/set command line arguments. If the argument is specified in the list of test "fixturenames".""" access_token = metafunc.config.option.access_token api_key = metafunc.config.option.api_key root_url = metafunc.config.option.root_url if "access_token" in metafunc.fixturenames and access_token is not None: metafunc.parametrize("access_token", [access_token]) if "api_key" in metafunc.fixturenames and api_key is not None: metafunc.parametrize("api_key", [api_key]) if "root_url" in metafunc.fixturenames and root_url is not None: metafunc.parametrize("root_url", [root_url]) @pytest.fixture() def kiteconnect(api_key, access_token, root_url): """Init Kite connect object.""" return KiteConnect(api_key=api_key, access_token=access_token, root=root_url or None)
15808c86b273363c9f6466107d0cbc14030a97fa
ykman/scanmap/__init__.py
ykman/scanmap/__init__.py
from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError: raise ValueError('Character not available in keyboard layout!')
from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError as e: raise ValueError('Unsupported character: %s' % e.args[0])
Clarify which character was missing
Clarify which character was missing
Python
bsd-2-clause
Yubico/yubikey-manager,Yubico/yubikey-manager
from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError: raise ValueError('Character not available in keyboard layout!') Clarify which character was missing
from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError as e: raise ValueError('Unsupported character: %s' % e.args[0])
<commit_before>from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError: raise ValueError('Character not available in keyboard layout!') <commit_msg>Clarify which character was missing<commit_after>
from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError as e: raise ValueError('Unsupported character: %s' % e.args[0])
from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError: raise ValueError('Character not available in keyboard layout!') Clarify which character was missingfrom enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError as e: raise ValueError('Unsupported character: %s' % e.args[0])
<commit_before>from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError: raise ValueError('Character not available in keyboard layout!') <commit_msg>Clarify which character was missing<commit_after>from enum import Enum from . import us class KEYBOARD_LAYOUT(Enum): US = 'US Keyboard Layout' def get_scan_codes(data, keyboard_layout=KEYBOARD_LAYOUT.US): if keyboard_layout == KEYBOARD_LAYOUT.US: scancodes = us.scancodes else: raise ValueError('Keyboard layout not supported!') try: return bytes(bytearray(scancodes[c] for c in data)) except KeyError as e: raise ValueError('Unsupported character: %s' % e.args[0])
611218f302d30213fece13c1a8997f87a44afa70
djangosqladmin/databases/views.py
djangosqladmin/databases/views.py
from django.shortcuts import render from django.http import HttpResponse def dashboard(request): return HttpResponse('SUCCESS!')
from django.shortcuts import render from django.http import HttpResponse def dashboard(request): context = {} if request.user.is_authenticated: context['databases'] = request.user.database_set.all() return render(request, 'databases/dashboard.html', context)
Add databases to dashboard context
Add databases to dashboard context
Python
mit
jakesen/djangosqladmin,jakesen/djangosqladmin,jakesen/djangosqladmin
from django.shortcuts import render from django.http import HttpResponse def dashboard(request): return HttpResponse('SUCCESS!') Add databases to dashboard context
from django.shortcuts import render from django.http import HttpResponse def dashboard(request): context = {} if request.user.is_authenticated: context['databases'] = request.user.database_set.all() return render(request, 'databases/dashboard.html', context)
<commit_before>from django.shortcuts import render from django.http import HttpResponse def dashboard(request): return HttpResponse('SUCCESS!') <commit_msg>Add databases to dashboard context<commit_after>
from django.shortcuts import render from django.http import HttpResponse def dashboard(request): context = {} if request.user.is_authenticated: context['databases'] = request.user.database_set.all() return render(request, 'databases/dashboard.html', context)
from django.shortcuts import render from django.http import HttpResponse def dashboard(request): return HttpResponse('SUCCESS!') Add databases to dashboard contextfrom django.shortcuts import render from django.http import HttpResponse def dashboard(request): context = {} if request.user.is_authenticated: context['databases'] = request.user.database_set.all() return render(request, 'databases/dashboard.html', context)
<commit_before>from django.shortcuts import render from django.http import HttpResponse def dashboard(request): return HttpResponse('SUCCESS!') <commit_msg>Add databases to dashboard context<commit_after>from django.shortcuts import render from django.http import HttpResponse def dashboard(request): context = {} if request.user.is_authenticated: context['databases'] = request.user.database_set.all() return render(request, 'databases/dashboard.html', context)
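A hedged sketch of where the database_set reverse accessor used in the new view comes from: Django attaches a <model>_set manager to User for any ForeignKey pointing at it, unless related_name overrides the name. The model below is assumed for illustration, not taken from the repo.

from django.conf import settings
from django.db import models

class Database(models.Model):
    # A ForeignKey to the user model is what makes user.database_set exist;
    # the accessor name is derived from the model name, not the field name.
    owner = models.ForeignKey(settings.AUTH_USER_MODEL,
                              on_delete=models.CASCADE)
    name = models.CharField(max_length=100)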
151c3484da58fa02f7d2c69454be3cb4e3395d05
recipes/recipe_modules/bot_update/tests/ensure_checkout.py
recipes/recipe_modules/bot_update/tests/ensure_checkout.py
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusCodeIn, 0) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusCodeIn, 1) + api.post_process(post_process.DropExpectation) )
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusSuccess) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusAnyFailure) + api.post_process(post_process.DropExpectation) )
Replace post-process checks with ones that are not deprecated
Replace post-process checks with ones that are not deprecated R=40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org Bug: 899266 Change-Id: Ia9b1f38590d636fa2858a2bd0bbf75d6b2cfe8fa Reviewed-on: https://chromium-review.googlesource.com/c/1483033 Reviewed-by: Robbie Iannucci <40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org> Reviewed-by: John Budorick <17d38a2d68c6a07a3ab0ce4a2873c5acefbd3dbb@chromium.org> Commit-Queue: Sergiy Belozorov <aadb4a11584aa9878242ea5d9e4b7e3429654579@chromium.org>
Python
bsd-3-clause
CoherentLabs/depot_tools,CoherentLabs/depot_tools
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusCodeIn, 0) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusCodeIn, 1) + api.post_process(post_process.DropExpectation) ) Replace post-process checks with ones that are not deprecated R=40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org Bug: 899266 Change-Id: Ia9b1f38590d636fa2858a2bd0bbf75d6b2cfe8fa Reviewed-on: https://chromium-review.googlesource.com/c/1483033 Reviewed-by: Robbie Iannucci <40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org> Reviewed-by: John Budorick <17d38a2d68c6a07a3ab0ce4a2873c5acefbd3dbb@chromium.org> Commit-Queue: Sergiy Belozorov <aadb4a11584aa9878242ea5d9e4b7e3429654579@chromium.org>
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusSuccess) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusAnyFailure) + api.post_process(post_process.DropExpectation) )
<commit_before># Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusCodeIn, 0) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusCodeIn, 1) + api.post_process(post_process.DropExpectation) ) <commit_msg>Replace post-process checks with ones that are not deprecated R=40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org Bug: 899266 Change-Id: Ia9b1f38590d636fa2858a2bd0bbf75d6b2cfe8fa Reviewed-on: https://chromium-review.googlesource.com/c/1483033 Reviewed-by: Robbie Iannucci <40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org> Reviewed-by: John Budorick <17d38a2d68c6a07a3ab0ce4a2873c5acefbd3dbb@chromium.org> Commit-Queue: Sergiy Belozorov <aadb4a11584aa9878242ea5d9e4b7e3429654579@chromium.org><commit_after>
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusSuccess) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusAnyFailure) + api.post_process(post_process.DropExpectation) )
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusCodeIn, 0) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusCodeIn, 1) + api.post_process(post_process.DropExpectation) ) Replace post-process checks with ones that are not deprecated R=40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org Bug: 899266 Change-Id: Ia9b1f38590d636fa2858a2bd0bbf75d6b2cfe8fa Reviewed-on: https://chromium-review.googlesource.com/c/1483033 Reviewed-by: Robbie Iannucci <40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org> Reviewed-by: John Budorick <17d38a2d68c6a07a3ab0ce4a2873c5acefbd3dbb@chromium.org> Commit-Queue: Sergiy Belozorov <aadb4a11584aa9878242ea5d9e4b7e3429654579@chromium.org># Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusSuccess) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusAnyFailure) + api.post_process(post_process.DropExpectation) )
<commit_before># Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusCodeIn, 0) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusCodeIn, 1) + api.post_process(post_process.DropExpectation) ) <commit_msg>Replace post-process checks with ones that are not deprecated R=40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org Bug: 899266 Change-Id: Ia9b1f38590d636fa2858a2bd0bbf75d6b2cfe8fa Reviewed-on: https://chromium-review.googlesource.com/c/1483033 Reviewed-by: Robbie Iannucci <40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org> Reviewed-by: John Budorick <17d38a2d68c6a07a3ab0ce4a2873c5acefbd3dbb@chromium.org> Commit-Queue: Sergiy Belozorov <aadb4a11584aa9878242ea5d9e4b7e3429654579@chromium.org><commit_after># Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import post_process DEPS = [ 'bot_update', 'gclient', 'recipe_engine/json', ] def RunSteps(api): api.gclient.set_config('depot_tools') api.bot_update.ensure_checkout() def GenTests(api): yield ( api.test('basic') + api.post_process(post_process.StatusSuccess) + api.post_process(post_process.DropExpectation) ) yield ( api.test('failure') + api.override_step_data( 'bot_update', api.json.output({'did_run': True}), retcode=1) + api.post_process(post_process.StatusAnyFailure) + api.post_process(post_process.DropExpectation) )
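Both replacement checks are attributes of the same post_process module the test already imports, so a quick sanity probe before migrating other tests could look like the sketch below (assumes a recipe_engine checkout on the import path; this is a convenience check, not part of the commit).

from recipe_engine import post_process

# Fail fast if this checkout predates the non-deprecated checks.
for name in ('StatusSuccess', 'StatusAnyFailure'):
    assert hasattr(post_process, name), '%s missing from post_process' % name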
50bb7a20ee055b870794607022e4e30f8842f80d
openedx/core/djangoapps/appsembler/settings/settings/devstack_lms.py
openedx/core/djangoapps/appsembler/settings/settings/devstack_lms.py
""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, customer_themes_dir) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, ('customer_themes', customer_themes_dir)) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
Set the customer_themes static dir prefix for devstack (no S3) customer theme file storage to match expectation in SiteConfiguration model method
Set the customer_themes static dir prefix for devstack (no S3) customer theme file storage to match expectation in SiteConfiguration model method
Python
agpl-3.0
appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform
""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, customer_themes_dir) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/'] Set the customer_themes static dir prefix for devstack (no S3) customer theme file storage to match expectation in SiteConfiguration model method
""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, ('customer_themes', customer_themes_dir)) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
<commit_before>""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, customer_themes_dir) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/'] <commit_msg>Set the customer_themes static dir prefix for devstack (no S3) customer theme file storage to match expectation in SiteConfiguration model method<commit_after>
""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, ('customer_themes', customer_themes_dir)) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, customer_themes_dir) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/'] Set the customer_themes static dir prefix for devstack (no S3) customer theme file storage to match expectation in SiteConfiguration model method""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, ('customer_themes', customer_themes_dir)) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
<commit_before>""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, customer_themes_dir) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/'] <commit_msg>Set the customer_themes static dir prefix for devstack (no S3) customer theme file storage to match expectation in SiteConfiguration model method<commit_after>""" Settings for Appsembler on devstack/LMS. """ from os import path from openedx.core.djangoapps.appsembler.settings.settings import devstack_common def plugin_settings(settings): """ Appsembler LMS overrides for devstack. """ devstack_common.plugin_settings(settings) settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False settings.SITE_ID = 1 settings.EDX_API_KEY = "test" settings.ALTERNATE_QUEUE_ENVS = ['cms'] settings.USE_S3_FOR_CUSTOMER_THEMES = False if settings.ENABLE_COMPREHENSIVE_THEMING: assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, ( 'Tahoe supports a single theme, please double check that ' 'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.' ) # Add the LMS-generated customer CSS files to the list # LMS-generated files looks like: `appsembler-academy.tahoe.appsembler.com.css` customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes') if path.isdir(customer_themes_dir): settings.STATICFILES_DIRS.insert(0, ('customer_themes', customer_themes_dir)) # This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths # from the redirect mechanics. settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
7cc357584ddd4f8e57783b5e0a462b5ad0daf411
footer.py
footer.py
import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname())
import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\" title=\"love\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\" title=\"StackOverflow\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname())
Add titles to FA icons
Add titles to FA icons
Python
apache-2.0
ISD-Sound-and-Lights/InventoryControl
import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname()) Add titles to FA icons
import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\" title=\"love\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\" title=\"StackOverflow\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname())
<commit_before>import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname()) <commit_msg>Add titles to FA icons<commit_after>
import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\" title=\"love\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\" title=\"StackOverflow\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname())
import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname()) Add titles to FA iconsimport htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\" title=\"love\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\" title=\"StackOverflow\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname())
<commit_before>import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname()) <commit_msg>Add titles to FA icons<commit_after>import htmlify from socket import gethostname as hostname from time import time as unixTime def showFooter(): # Footer htmlify.dispHTML("br") htmlify.dispHTML("hr") heart = "<i class=\"fa fa-heart\" aria-hidden=\"true\" title=\"love\"></i>" so = "<i class=\"fa fa-stack-overflow\" aria-hidden=\"true\" title=\"StackOverflow\"></i>" tProfileLink = htmlify.getHTML("a", contents="Theo C", href="http://github.com/DyingEcho") iProfileLink = htmlify.getHTML("a", contents="Isaac L", href="http://github.com/il8677") htmlify.dispHTML("small", contents="Made with " + heart + " and " + so + " by " + tProfileLink + " and " + iProfileLink) renderTime = unixTime() htmlify.dispHTML("br") htmlify.dispHTML("small", contents="Rendered at " + str(round(renderTime)) + " by " + hostname())
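A small sketch of the markup pattern the commit settles on: the icon stays aria-hidden, and the title attribute supplies tooltip text naming it. The helper name is invented; the class names are Font Awesome 4 syntax, as in the row.

def fa_icon(name, title):
    # aria-hidden keeps the glyph out of the accessibility tree;
    # title gives a hover tooltip naming the icon
    return ('<i class="fa fa-%s" aria-hidden="true" title="%s"></i>'
            % (name, title))

print(fa_icon('heart', 'love'))
# <i class="fa fa-heart" aria-hidden="true" title="love"></i>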
42d038f09bb9b24802ee78f92a5c7a309acf3a7a
zerver/migrations/0301_fix_unread_messages_in_deactivated_streams.py
zerver/migrations/0301_fix_unread_messages_in_deactivated_streams.py
from django.db import connection, migrations from django.db.backends.postgresql.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def mark_messages_read(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: Stream = apps.get_model("zerver", "Stream") deactivated_stream_ids = list(Stream.objects.filter(deactivated=True).values_list('id', flat=True)) with connection.cursor() as cursor: for i in deactivated_stream_ids: cursor.execute(f""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.id = {i}; """) class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ atomic = False dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunPython(mark_messages_read, reverse_code=migrations.RunPython.noop), ]
from django.db import migrations class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunSQL( sql=""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.deactivated; """, reverse_sql="", ), ]
Fix 0301 to replace a Python loop with SQL.
migrations: Fix 0301 to replace a Python loop with SQL. The previous code is correctly flagged by semgrep 0.23 as a violation of our sql-format rule. Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com>
Python
apache-2.0
zulip/zulip,andersk/zulip,showell/zulip,punchagan/zulip,hackerkid/zulip,showell/zulip,eeshangarg/zulip,showell/zulip,rht/zulip,rht/zulip,andersk/zulip,eeshangarg/zulip,zulip/zulip,punchagan/zulip,showell/zulip,andersk/zulip,hackerkid/zulip,rht/zulip,andersk/zulip,punchagan/zulip,zulip/zulip,punchagan/zulip,showell/zulip,eeshangarg/zulip,showell/zulip,eeshangarg/zulip,rht/zulip,kou/zulip,hackerkid/zulip,punchagan/zulip,zulip/zulip,rht/zulip,andersk/zulip,eeshangarg/zulip,kou/zulip,hackerkid/zulip,kou/zulip,andersk/zulip,andersk/zulip,rht/zulip,kou/zulip,hackerkid/zulip,hackerkid/zulip,hackerkid/zulip,eeshangarg/zulip,kou/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,kou/zulip,showell/zulip,punchagan/zulip,zulip/zulip,rht/zulip,kou/zulip,punchagan/zulip
from django.db import connection, migrations from django.db.backends.postgresql.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def mark_messages_read(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: Stream = apps.get_model("zerver", "Stream") deactivated_stream_ids = list(Stream.objects.filter(deactivated=True).values_list('id', flat=True)) with connection.cursor() as cursor: for i in deactivated_stream_ids: cursor.execute(f""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.id = {i}; """) class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ atomic = False dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunPython(mark_messages_read, reverse_code=migrations.RunPython.noop), ] migrations: Fix 0301 to replace a Python loop with SQL. The previous code is correctly flagged by semgrep 0.23 as a violation of our sql-format rule. Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com>
from django.db import migrations class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunSQL( sql=""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.deactivated; """, reverse_sql="", ), ]
<commit_before>from django.db import connection, migrations from django.db.backends.postgresql.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def mark_messages_read(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: Stream = apps.get_model("zerver", "Stream") deactivated_stream_ids = list(Stream.objects.filter(deactivated=True).values_list('id', flat=True)) with connection.cursor() as cursor: for i in deactivated_stream_ids: cursor.execute(f""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.id = {i}; """) class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ atomic = False dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunPython(mark_messages_read, reverse_code=migrations.RunPython.noop), ] <commit_msg>migrations: Fix 0301 to replace a Python loop with SQL. The previous code is correctly flagged by semgrep 0.23 as a violation of our sql-format rule. Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com><commit_after>
from django.db import migrations class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunSQL( sql=""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.deactivated; """, reverse_sql="", ), ]
from django.db import connection, migrations from django.db.backends.postgresql.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def mark_messages_read(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: Stream = apps.get_model("zerver", "Stream") deactivated_stream_ids = list(Stream.objects.filter(deactivated=True).values_list('id', flat=True)) with connection.cursor() as cursor: for i in deactivated_stream_ids: cursor.execute(f""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.id = {i}; """) class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ atomic = False dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunPython(mark_messages_read, reverse_code=migrations.RunPython.noop), ] migrations: Fix 0301 to replace a Python loop with SQL. The previous code is correctly flagged by semgrep 0.23 as a violation of our sql-format rule. Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com>from django.db import migrations class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunSQL( sql=""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.deactivated; """, reverse_sql="", ), ]
<commit_before>from django.db import connection, migrations from django.db.backends.postgresql.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def mark_messages_read(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: Stream = apps.get_model("zerver", "Stream") deactivated_stream_ids = list(Stream.objects.filter(deactivated=True).values_list('id', flat=True)) with connection.cursor() as cursor: for i in deactivated_stream_ids: cursor.execute(f""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.id = {i}; """) class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ atomic = False dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunPython(mark_messages_read, reverse_code=migrations.RunPython.noop), ] <commit_msg>migrations: Fix 0301 to replace a Python loop with SQL. The previous code is correctly flagged by semgrep 0.23 as a violation of our sql-format rule. Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com><commit_after>from django.db import migrations class Migration(migrations.Migration): """ We're changing the stream deactivation process to make it mark all messages in the stream as read. For things to be consistent with streams that have been deactivated before this change, we need a migration to fix those old streams, to have all messages marked as read. """ dependencies = [ ('zerver', '0300_add_attachment_is_web_public'), ] operations = [ migrations.RunSQL( sql=""" UPDATE zerver_usermessage SET flags = flags | 1 FROM zerver_message INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id WHERE zerver_message.id = zerver_usermessage.message_id AND zerver_stream.deactivated; """, reverse_sql="", ), ]
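The loop-to-SQL rewrite hinges on migrations.RunSQL, which executes raw SQL inside a migration and accepts reverse_sql for the backward direction. A stripped-down sketch with placeholder app and table names:

from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [('myapp', '0001_initial')]  # placeholder

    operations = [
        migrations.RunSQL(
            sql="UPDATE myapp_item SET flags = flags | 1;",
            # RunSQL.noop is Django's documented no-op sentinel; the row
            # above passes reverse_sql="" to the same practical effect.
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]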
19c6017077af2207169b4dbb67b2fe67f0a36568
kpi/backends.py
kpi/backends.py
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label)
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, perm, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label)
Fix insanity-inducing argument omission mistake
Fix insanity-inducing argument omission mistake
Python
agpl-3.0
kobotoolbox/kpi,onaio/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi,onaio/kpi
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label) Fix insanity-inducing argument omission mistake
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, perm, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label)
<commit_before>from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label) <commit_msg>Fix insanity-inducing argument omission mistake<commit_after>
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, perm, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label)
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label) Fix insanity-inducing argument omission mistakefrom django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, perm, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label)
<commit_before>from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label) <commit_msg>Fix insanity-inducing argument omission mistake<commit_after>from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import AnonymousUser class ObjectPermissionBackend(ModelBackend): def get_group_permissions(self, user_obj, obj=None): # probably won't be used return super(ObjectPermissionBackend, self ).get_group_permissions(user_obj, obj) def get_all_permissions(self, user_obj, obj=None): return super(ObjectPermissionBackend, self ).get_all_permissions(user_obj, obj) def has_perm(self, user_obj, perm, obj=None): if obj is None or not hasattr(obj, 'has_perm'): return super(ObjectPermissionBackend, self ).has_perm(user_obj, perm, obj) if not user_obj.is_active and not isinstance(user_obj, AnonymousUser): # Inactive users are denied immediately, except in the case of # AnonymousUsers. They are inactive but require further processing return False return obj.has_perm(user_obj, perm) def has_module_perms(self, user_obj, app_label): # probably won't be used return super(ObjectPermissionBackend, self ).has_module_perms(user_obj, app_label)
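A toy reproduction (all names invented) of the bug class fixed above: forwarding only two positional arguments into a (user, perm, obj=None) signature silently binds the object into the perm slot instead of raising.

def base_has_perm(user, perm, obj=None):
    return 'perm=%r obj=%r' % (perm, obj)

asset = object()
print(base_has_perm('alice', asset))                # bug: asset lands in perm, obj stays None
print(base_has_perm('alice', 'view_asset', asset))  # corrected forwarding passes all three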
ababb2a603d91f407d4ecfc46ceabb2849413914
test/TestVariableHasSpaces.py
test/TestVariableHasSpaces.py
import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))

import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
- name: JSON inside jinja is valid
  debug:
    msg: "{{ {'test': {'subtest': variable}} }}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))
Add a test for false positive with nested JSON
Add a test for false positive with nested JSON

Closes: #791
Python
mit
willthames/ansible-lint
import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))

Add a test for false positive with nested JSON

Closes: #791

import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
- name: JSON inside jinja is valid
  debug:
    msg: "{{ {'test': {'subtest': variable}} }}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))

<commit_before>import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))
<commit_msg>Add a test for false positive with nested JSON

Closes: #791<commit_after>

import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
- name: JSON inside jinja is valid
  debug:
    msg: "{{ {'test': {'subtest': variable}} }}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))

import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))

Add a test for false positive with nested JSON

Closes: #791import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
- name: JSON inside jinja is valid
  debug:
    msg: "{{ {'test': {'subtest': variable}} }}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))

<commit_before>import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))
<commit_msg>Add a test for false positive with nested JSON

Closes: #791<commit_after>import unittest

from ansiblelint import RulesCollection
from ansiblelint.rules.VariableHasSpacesRule import VariableHasSpacesRule

from test import RunFromText

TASK_VARIABLES = '''
- name: good variable format
  debug:
    msg: "{{ good_format }}"
- name: good variable format
  debug:
    msg: "Value: {{ good_format }}"
- name: jinja escaping allowed
  debug:
    msg: "{{ '{{' }}"
- name: jinja escaping allowed
  shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"'
- name: jinja whitespace control allowed
  debug:
    msg: |
      {{ good_format }}/
      {{- good_format }}
      {{- good_format -}}
- name: bad variable format
  debug:
    msg: "{{bad_format}}"
- name: bad variable format
  debug:
    msg: "Value: {{ bad_format}}"
- name: bad variable format
  debug:
    msg: "{{bad_format }}"
- name: not a jinja variable
  debug:
    msg: "test"
  example: "data = ${lookup{$local_part}lsearch{/etc/aliases}}"
- name: JSON inside jinja is valid
  debug:
    msg: "{{ {'test': {'subtest': variable}} }}"
'''


class TestVariableHasSpaces(unittest.TestCase):
    collection = RulesCollection()
    collection.register(VariableHasSpacesRule())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_variable_has_spaces(self):
        results = self.runner.run_role_tasks_main(TASK_VARIABLES)
        self.assertEqual(3, len(results))
172b6b417cbd3bc2ffacf7f38b3a49f84510d13c
localeurl/models.py
localeurl/models.py
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    locale = utils.supported_language(reverse_kwargs.pop('locale',
            translation.get_language()))
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()

from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    if reverse_kwargs is not None:
        locale = utils.supported_language(reverse_kwargs.pop('locale',
                translation.get_language()))
    else:
        locale = translation.get_language()
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()
Handle situation when kwargs is None
Handle situation when kwargs is None
Python
mit
jmagnusson/django-localeurl,simonluijk/django-localeurl
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    locale = utils.supported_language(reverse_kwargs.pop('locale',
            translation.get_language()))
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()

Handle situation when kwargs is None

from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    if reverse_kwargs is not None:
        locale = utils.supported_language(reverse_kwargs.pop('locale',
                translation.get_language()))
    else:
        locale = translation.get_language()
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()

<commit_before>from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    locale = utils.supported_language(reverse_kwargs.pop('locale',
            translation.get_language()))
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()
<commit_msg>Handle situation when kwargs is None<commit_after>

from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    if reverse_kwargs is not None:
        locale = utils.supported_language(reverse_kwargs.pop('locale',
                translation.get_language()))
    else:
        locale = translation.get_language()
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()

from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    locale = utils.supported_language(reverse_kwargs.pop('locale',
            translation.get_language()))
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()

Handle situation when kwargs is Nonefrom django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    if reverse_kwargs is not None:
        locale = utils.supported_language(reverse_kwargs.pop('locale',
                translation.get_language()))
    else:
        locale = translation.get_language()
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()

<commit_before>from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    locale = utils.supported_language(reverse_kwargs.pop('locale',
            translation.get_language()))
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()
<commit_msg>Handle situation when kwargs is None<commit_after>from django.conf import settings
from django.core import urlresolvers
from django.utils import translation

from localeurl import utils


def reverse(*args, **kwargs):
    reverse_kwargs = kwargs.get('kwargs', {})
    if reverse_kwargs is not None:
        locale = utils.supported_language(reverse_kwargs.pop('locale',
                translation.get_language()))
    else:
        locale = translation.get_language()
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)

django_reverse = None


def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse


if settings.USE_I18N:
    patch_reverse()
aff5a09eb3d61f77cb277b076820481b8ba145d5
tests/test_coroutine.py
tests/test_coroutine.py
import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])


if __name__ == '__main__':
    import unittest
    unittest.main()

import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
            return "."

        def waiter(result):
            loop = asyncio.get_event_loop()
            fut = asyncio.Future(loop=loop)
            loop.call_soon(fut.set_result, "Future")

            value = yield from fut
            result.append(value)

            value = yield from hello_world(result, 0.001)
            result.append(value)
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From, Return

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')
        raise Return(".")

    def waiter(result):
        loop = asyncio.get_event_loop()
        fut = asyncio.Future(loop=loop)
        loop.call_soon(fut.set_result, "Future")

        value = yield From(fut)
        result.append(value)

        value = yield From(hello_world(result, 0.001))
        result.append(value)


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])

    def test_waiter(self):
        result = []
        self.loop.run_until_complete(waiter(result))
        self.assertEqual(result, ['Future', 'Hello', 'World', '.'])


if __name__ == '__main__':
    import unittest
    unittest.main()
Add more complex coroutine example
Add more complex coroutine example
Python
apache-2.0
overcastcloud/aioeventlet
import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])


if __name__ == '__main__':
    import unittest
    unittest.main()

Add more complex coroutine example

import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
            return "."

        def waiter(result):
            loop = asyncio.get_event_loop()
            fut = asyncio.Future(loop=loop)
            loop.call_soon(fut.set_result, "Future")

            value = yield from fut
            result.append(value)

            value = yield from hello_world(result, 0.001)
            result.append(value)
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From, Return

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')
        raise Return(".")

    def waiter(result):
        loop = asyncio.get_event_loop()
        fut = asyncio.Future(loop=loop)
        loop.call_soon(fut.set_result, "Future")

        value = yield From(fut)
        result.append(value)

        value = yield From(hello_world(result, 0.001))
        result.append(value)


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])

    def test_waiter(self):
        result = []
        self.loop.run_until_complete(waiter(result))
        self.assertEqual(result, ['Future', 'Hello', 'World', '.'])


if __name__ == '__main__':
    import unittest
    unittest.main()

<commit_before>import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])


if __name__ == '__main__':
    import unittest
    unittest.main()
<commit_msg>Add more complex coroutine example<commit_after>

import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
            return "."

        def waiter(result):
            loop = asyncio.get_event_loop()
            fut = asyncio.Future(loop=loop)
            loop.call_soon(fut.set_result, "Future")

            value = yield from fut
            result.append(value)

            value = yield from hello_world(result, 0.001)
            result.append(value)
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From, Return

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')
        raise Return(".")

    def waiter(result):
        loop = asyncio.get_event_loop()
        fut = asyncio.Future(loop=loop)
        loop.call_soon(fut.set_result, "Future")

        value = yield From(fut)
        result.append(value)

        value = yield From(hello_world(result, 0.001))
        result.append(value)


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])

    def test_waiter(self):
        result = []
        self.loop.run_until_complete(waiter(result))
        self.assertEqual(result, ['Future', 'Hello', 'World', '.'])


if __name__ == '__main__':
    import unittest
    unittest.main()

import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])


if __name__ == '__main__':
    import unittest
    unittest.main()

Add more complex coroutine exampleimport tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
            return "."

        def waiter(result):
            loop = asyncio.get_event_loop()
            fut = asyncio.Future(loop=loop)
            loop.call_soon(fut.set_result, "Future")

            value = yield from fut
            result.append(value)

            value = yield from hello_world(result, 0.001)
            result.append(value)
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From, Return

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')
        raise Return(".")

    def waiter(result):
        loop = asyncio.get_event_loop()
        fut = asyncio.Future(loop=loop)
        loop.call_soon(fut.set_result, "Future")

        value = yield From(fut)
        result.append(value)

        value = yield From(hello_world(result, 0.001))
        result.append(value)


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])

    def test_waiter(self):
        result = []
        self.loop.run_until_complete(waiter(result))
        self.assertEqual(result, ['Future', 'Hello', 'World', '.'])


if __name__ == '__main__':
    import unittest
    unittest.main()

<commit_before>import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])


if __name__ == '__main__':
    import unittest
    unittest.main()
<commit_msg>Add more complex coroutine example<commit_after>import tests

try:
    import asyncio

    exec('''if 1:
        def hello_world(result, delay):
            result.append("Hello")
            # retrieve the event loop from the policy
            yield from asyncio.sleep(delay)
            result.append('World')
            return "."

        def waiter(result):
            loop = asyncio.get_event_loop()
            fut = asyncio.Future(loop=loop)
            loop.call_soon(fut.set_result, "Future")

            value = yield from fut
            result.append(value)

            value = yield from hello_world(result, 0.001)
            result.append(value)
    ''')
except ImportError:
    import trollius as asyncio
    from trollius import From, Return

    def hello_world(result, delay):
        result.append("Hello")
        # retrieve the event loop from the policy
        yield From(asyncio.sleep(delay))
        result.append('World')
        raise Return(".")

    def waiter(result):
        loop = asyncio.get_event_loop()
        fut = asyncio.Future(loop=loop)
        loop.call_soon(fut.set_result, "Future")

        value = yield From(fut)
        result.append(value)

        value = yield From(hello_world(result, 0.001))
        result.append(value)


class CallbackTests(tests.TestCase):
    def test_hello_world(self):
        result = []
        self.loop.run_until_complete(hello_world(result, 0.001))
        self.assertEqual(result, ['Hello', 'World'])

    def test_waiter(self):
        result = []
        self.loop.run_until_complete(waiter(result))
        self.assertEqual(result, ['Future', 'Hello', 'World', '.'])


if __name__ == '__main__':
    import unittest
    unittest.main()
42bfa6b69697c0c093a961df5708f477288a6efa
icekit/plugins/twitter_embed/forms.py
icekit/plugins/twitter_embed/forms.py
import re

from django import forms
from fluent_contents.forms import ContentItemForm


class TwitterEmbedAdminForm(ContentItemForm):
    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url

import re

from django import forms
from fluent_contents.forms import ContentItemForm

from icekit.plugins.twitter_embed.models import TwitterEmbedItem


class TwitterEmbedAdminForm(ContentItemForm):
    class Meta:
        model = TwitterEmbedItem
        fields = '__all__'

    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url
Add model and field information to form.
Add model and field information to form.
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
import re

from django import forms
from fluent_contents.forms import ContentItemForm


class TwitterEmbedAdminForm(ContentItemForm):
    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url

Add model and field information to form.

import re

from django import forms
from fluent_contents.forms import ContentItemForm

from icekit.plugins.twitter_embed.models import TwitterEmbedItem


class TwitterEmbedAdminForm(ContentItemForm):
    class Meta:
        model = TwitterEmbedItem
        fields = '__all__'

    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url

<commit_before>import re

from django import forms
from fluent_contents.forms import ContentItemForm


class TwitterEmbedAdminForm(ContentItemForm):
    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url
<commit_msg>Add model and field information to form.<commit_after>

import re

from django import forms
from fluent_contents.forms import ContentItemForm

from icekit.plugins.twitter_embed.models import TwitterEmbedItem


class TwitterEmbedAdminForm(ContentItemForm):
    class Meta:
        model = TwitterEmbedItem
        fields = '__all__'

    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url

import re

from django import forms
from fluent_contents.forms import ContentItemForm


class TwitterEmbedAdminForm(ContentItemForm):
    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url

Add model and field information to form.import re

from django import forms
from fluent_contents.forms import ContentItemForm

from icekit.plugins.twitter_embed.models import TwitterEmbedItem


class TwitterEmbedAdminForm(ContentItemForm):
    class Meta:
        model = TwitterEmbedItem
        fields = '__all__'

    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url

<commit_before>import re

from django import forms
from fluent_contents.forms import ContentItemForm


class TwitterEmbedAdminForm(ContentItemForm):
    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url
<commit_msg>Add model and field information to form.<commit_after>import re

from django import forms
from fluent_contents.forms import ContentItemForm

from icekit.plugins.twitter_embed.models import TwitterEmbedItem


class TwitterEmbedAdminForm(ContentItemForm):
    class Meta:
        model = TwitterEmbedItem
        fields = '__all__'

    def clean_twitter_url(self):
        """
        Make sure the URL provided matches the twitter URL format.
        """
        url = self.cleaned_data['twitter_url']
        if url:
            pattern = re.compile(r'https?://(www\.)?twitter.com/\S+/status(es)?/\S+')
            if not pattern.match(url):
                raise forms.ValidationError('Please provide a valid twitter link.')
        return url
21bb022987c54b8c3343cfe5f4994203d799dc20
street_score/project/urls.py
street_score/project/urls.py
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)

from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)
Remove gis dependency in admin
Remove gis dependency in admin
Python
mit
openplans/streetscore,openplans/streetscore,openplans/streetscore
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)

Remove gis dependency in admin

from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)

<commit_before>from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)
<commit_msg>Remove gis dependency in admin<commit_after>

from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)

from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)

Remove gis dependency in adminfrom django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)

<commit_before>from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)
<commit_msg>Remove gis dependency in admin<commit_after>from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView

from . import resources, views

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', TemplateView.as_view(template_name='index.html'), name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(), name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(), name='rating_instance'),

    url(r'^survey_session$', resources.SurveySessionView.as_view(), name='survey_session_instance'),
    url(r'^survey_sessions/$', resources.SurveySessionListView.as_view(), name='survey_session_list'),

    url(r'^block_ratings/$', resources.BlockRatingListView.as_view(), name='block_rating_list'),

    url(r'^data/?$', views.csv_data, name='data_csv_list'),
)
10a787c9f2147081001239029146b5b049db17f0
featureflow/__init__.py
featureflow/__init__.py
__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass

__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder, PickleDecoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass
Add PickleDecoder to the public API
Add PickleDecoder to the public API
Python
mit
JohnVinyard/featureflow,JohnVinyard/featureflow
__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass

Add PickleDecoder to the public API

__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder, PickleDecoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass

<commit_before>__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass
<commit_msg>Add PickleDecoder to the public API<commit_after>

__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder, PickleDecoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass

__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass

Add PickleDecoder to the public API__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder, PickleDecoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass

<commit_before>__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass
<commit_msg>Add PickleDecoder to the public API<commit_after>__version__ = '1.16.14'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder, PickleDecoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel

try:
    from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
        BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass
e62e090f2282426d14dad52a06eeca788789846f
kpi/serializers/v2/user_asset_subscription.py
kpi/serializers/v2/user_asset_subscription.py
# coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}`').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset
# coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}` is allowed').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset
Improve (a tiny bit) validation error message
Improve (a tiny bit) validation error message
Python
agpl-3.0
kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi
# coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}`').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset Improve (a tiny bit) validation error message
# coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}` is allowed').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset
<commit_before># coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}`').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset <commit_msg>Improve (a tiny bit) validation error message<commit_after>
# coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}` is allowed').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset
# coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}`').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset Improve (a tiny bit) validation error message# coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}` is allowed').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset
<commit_before># coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}`').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset <commit_msg>Improve (a tiny bit) validation error message<commit_after># coding: utf-8 from django.utils.translation import ugettext as _ from rest_framework import serializers from kpi.constants import ( ASSET_TYPE_COLLECTION, PERM_DISCOVER_ASSET, PERM_VIEW_ASSET ) from kpi.fields import RelativePrefixHyperlinkedRelatedField from kpi.models import Asset from kpi.models import UserAssetSubscription from kpi.models.object_permission import get_anonymous_user, get_objects_for_user class UserAssetSubscriptionSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( lookup_field='uid', view_name='userassetsubscription-detail' ) asset = RelativePrefixHyperlinkedRelatedField( lookup_field='uid', view_name='asset-detail', queryset=Asset.objects.none() # will be set in __init__() ) uid = serializers.ReadOnlyField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['asset'].queryset = get_objects_for_user( get_anonymous_user(), [PERM_VIEW_ASSET, PERM_DISCOVER_ASSET], Asset ) class Meta: model = UserAssetSubscription lookup_field = 'uid' fields = ('url', 'asset', 'uid') def validate_asset(self, asset): if asset.asset_type != ASSET_TYPE_COLLECTION: raise serializers.ValidationError( _('Invalid asset type. Only `{asset_type}` is allowed').format( asset_type=ASSET_TYPE_COLLECTION ) ) return asset
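The commit above only appends "is allowed" to a DRF validation message, but the validate_<field> hook pattern it touches is worth isolating. Below is a standalone sketch that mimics the shape of the serializer hook without requiring Django or DRF; the ValidationError class and the constant's value are stand-ins, not the real kpi definitions:

ASSET_TYPE_COLLECTION = 'collection'  # assumed value of the kpi constant


class ValidationError(Exception):
    """Stand-in for rest_framework.serializers.ValidationError."""


def validate_asset(asset_type):
    # Reject anything that is not a collection, naming the one
    # allowed type in the error message, as the commit above does.
    if asset_type != ASSET_TYPE_COLLECTION:
        raise ValidationError(
            'Invalid asset type. Only `{asset_type}` is allowed'.format(
                asset_type=ASSET_TYPE_COLLECTION))
    return asset_type


if __name__ == '__main__':
    validate_asset('collection')  # passes
    try:
        validate_asset('survey')
    except ValidationError as exc:
        print(exc)  # Invalid asset type. Only `collection` is allowed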
984089c3e963998d62768721f23d7e7c72880e39
tests/testapp/test_fhadmin.py
tests/testapp/test_fhadmin.py
from django.contrib.auth.models import User from django.test import Client, TestCase class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") print(response, response.content.decode("utf-8"))
from django.contrib import admin from django.contrib.auth.models import User from django.test import Client, RequestFactory, TestCase from fhadmin.templatetags.fhadmin_module_groups import generate_group_list class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") # print(response, response.content.decode("utf-8")) def test_app_list(self): request = RequestFactory().get("/") request.user = User.objects.create(is_superuser=True) groups = list(generate_group_list(admin.sites.site, request)) # from pprint import pprint; pprint(groups) self.assertEqual(groups[0][0], "Modules") self.assertEqual(groups[0][1][0]["app_label"], "testapp") self.assertEqual(len(groups[0][1][0]["models"]), 1)
Test the app list generation a bit
Test the app list generation a bit
Python
bsd-3-clause
feinheit/django-fhadmin,feinheit/django-fhadmin,feinheit/django-fhadmin
from django.contrib.auth.models import User from django.test import Client, TestCase class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") print(response, response.content.decode("utf-8")) Test the app list generation a bit
from django.contrib import admin from django.contrib.auth.models import User from django.test import Client, RequestFactory, TestCase from fhadmin.templatetags.fhadmin_module_groups import generate_group_list class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") # print(response, response.content.decode("utf-8")) def test_app_list(self): request = RequestFactory().get("/") request.user = User.objects.create(is_superuser=True) groups = list(generate_group_list(admin.sites.site, request)) # from pprint import pprint; pprint(groups) self.assertEqual(groups[0][0], "Modules") self.assertEqual(groups[0][1][0]["app_label"], "testapp") self.assertEqual(len(groups[0][1][0]["models"]), 1)
<commit_before>from django.contrib.auth.models import User from django.test import Client, TestCase class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") print(response, response.content.decode("utf-8")) <commit_msg>Test the app list generation a bit<commit_after>
from django.contrib import admin from django.contrib.auth.models import User from django.test import Client, RequestFactory, TestCase from fhadmin.templatetags.fhadmin_module_groups import generate_group_list class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") # print(response, response.content.decode("utf-8")) def test_app_list(self): request = RequestFactory().get("/") request.user = User.objects.create(is_superuser=True) groups = list(generate_group_list(admin.sites.site, request)) # from pprint import pprint; pprint(groups) self.assertEqual(groups[0][0], "Modules") self.assertEqual(groups[0][1][0]["app_label"], "testapp") self.assertEqual(len(groups[0][1][0]["models"]), 1)
from django.contrib.auth.models import User from django.test import Client, TestCase class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") print(response, response.content.decode("utf-8")) Test the app list generation a bitfrom django.contrib import admin from django.contrib.auth.models import User from django.test import Client, RequestFactory, TestCase from fhadmin.templatetags.fhadmin_module_groups import generate_group_list class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") # print(response, response.content.decode("utf-8")) def test_app_list(self): request = RequestFactory().get("/") request.user = User.objects.create(is_superuser=True) groups = list(generate_group_list(admin.sites.site, request)) # from pprint import pprint; pprint(groups) self.assertEqual(groups[0][0], "Modules") self.assertEqual(groups[0][1][0]["app_label"], "testapp") self.assertEqual(len(groups[0][1][0]["models"]), 1)
<commit_before>from django.contrib.auth.models import User from django.test import Client, TestCase class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") print(response, response.content.decode("utf-8")) <commit_msg>Test the app list generation a bit<commit_after>from django.contrib import admin from django.contrib.auth.models import User from django.test import Client, RequestFactory, TestCase from fhadmin.templatetags.fhadmin_module_groups import generate_group_list class AdminTest(TestCase): def login(self): client = Client() u = User.objects.create( username="test", is_active=True, is_staff=True, is_superuser=True ) client.force_login(u) return client def test_dashboard(self): client = self.login() response = client.get("/admin/") self.assertContains(response, '<div class="groups">') self.assertContains(response, "<h2>Modules</h2>") self.assertContains(response, "<h2>Preferences</h2>") # print(response, response.content.decode("utf-8")) def test_app_list(self): request = RequestFactory().get("/") request.user = User.objects.create(is_superuser=True) groups = list(generate_group_list(admin.sites.site, request)) # from pprint import pprint; pprint(groups) self.assertEqual(groups[0][0], "Modules") self.assertEqual(groups[0][1][0]["app_label"], "testapp") self.assertEqual(len(groups[0][1][0]["models"]), 1)
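The new test above drives generate_group_list with a RequestFactory request and asserts on (title, app_list) pairs. As a rough illustration of what such a grouping helper has to produce, here is a self-contained sketch; the group names and dictionary shape are assumptions modeled on the test's assertions, not fhadmin's actual implementation (which takes an admin site and a request):

GROUPS = [
    ('Modules', ['testapp']),
    ('Preferences', ['auth']),
]


def generate_group_list(app_list):
    # Partition a flat admin-style app list into the named groups,
    # yielding (title, apps) pairs like the template tag under test.
    for title, labels in GROUPS:
        apps = [app for app in app_list if app['app_label'] in labels]
        if apps:
            yield title, apps


if __name__ == '__main__':
    app_list = [
        {'app_label': 'testapp', 'models': [{'name': 'Thing'}]},
        {'app_label': 'auth', 'models': [{'name': 'User'}]},
    ]
    groups = list(generate_group_list(app_list))
    assert groups[0][0] == 'Modules'
    assert groups[0][1][0]['app_label'] == 'testapp'
    assert len(groups[0][1][0]['models']) == 1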
d7ea417103bbe5a5c314b65a48dd823aca5df658
webpipe/xrender_test.py
webpipe/xrender_test.py
#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testRenderCsv(self): html, orig = xrender.RenderCsv('dir/foo.csv', 'foo.csv', CSV) print html def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main()
#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main()
Remove test moved to plugin.
Remove test moved to plugin.
Python
bsd-3-clause
andychu/webpipe,andychu/webpipe,andychu/webpipe,andychu/webpipe,andychu/webpipe
#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testRenderCsv(self): html, orig = xrender.RenderCsv('dir/foo.csv', 'foo.csv', CSV) print html def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main() Remove test moved to plugin.
#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main()
<commit_before>#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testRenderCsv(self): html, orig = xrender.RenderCsv('dir/foo.csv', 'foo.csv', CSV) print html def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main() <commit_msg>Remove test moved to plugin.<commit_after>
#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main()
#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testRenderCsv(self): html, orig = xrender.RenderCsv('dir/foo.csv', 'foo.csv', CSV) print html def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main() Remove test moved to plugin.#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main()
<commit_before>#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testRenderCsv(self): html, orig = xrender.RenderCsv('dir/foo.csv', 'foo.csv', CSV) print html def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main() <commit_msg>Remove test moved to plugin.<commit_after>#!/usr/bin/python -S """ xrender_test.py: Tests for xrender.py """ import unittest import xrender # module under test CSV = """\ name,age <carol>,10 <dave>,20 """ class FunctionsTest(unittest.TestCase): def testGuessFileType(self): self.assertEqual('png', xrender.GuessFileType('Rplot001.png')) self.assertEqual('ansi', xrender.GuessFileType('typescript')) def testCleanFilename(self): print xrender.CleanFilename('foo-bar_baz') print xrender.CleanFilename('foo bar') print xrender.CleanFilename('foo bar <>&') print xrender.CleanFilename('foo bar \\ @ ') class ResourcesTest(unittest.TestCase): def testResources(self): res = xrender.Resources() p = res.GetPluginBin('ansi') print p p = res.GetPluginBin('unknown') print p if __name__ == '__main__': unittest.main()
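The deleted testRenderCsv moved out with the CSV renderer itself; what remains covers file-type guessing. As an aside, a tiny standalone sketch of how a GuessFileType-style function can combine special-cased whole filenames with ordinary extension lookup — the mapping contents are illustrative, not webpipe's real table:

import os

SPECIAL_NAMES = {'typescript': 'ansi'}       # whole-name overrides
EXTENSIONS = {'.png': 'png', '.csv': 'csv'}  # ordinary suffix lookup


def guess_file_type(filename):
    # Exact filenames win over extensions, which is how a file named
    # 'typescript' (a script(1) log) can map to the 'ansi' type.
    if filename in SPECIAL_NAMES:
        return SPECIAL_NAMES[filename]
    return EXTENSIONS.get(os.path.splitext(filename)[1])


assert guess_file_type('Rplot001.png') == 'png'
assert guess_file_type('typescript') == 'ansi'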
c8c610c7249100e3e514b029a2f4209866910f3a
lumos/source.py
lumos/source.py
""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): def __init__(self, universe=1, network_segment=1): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568))
""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): """ bind_ip is the IP address assigned to a specific HW interface """ def __init__(self, universe=1, network_segment=1, bind_ip=None): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if bind_ip: self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(bind_ip)) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568))
Allow a specific address to be specified for sending
Allow a specific address to be specified for sending
Python
bsd-3-clause
ptone/Lumos
""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): def __init__(self, universe=1, network_segment=1): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568)) Allow a specific address to be specified for sending
""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): """ bind_ip is the IP address assigned to a specific HW interface """ def __init__(self, universe=1, network_segment=1, bind_ip=None): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if bind_ip: self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(bind_ip)) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568))
<commit_before>""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): def __init__(self, universe=1, network_segment=1): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568)) <commit_msg>Allow a specific address to be specified for sending<commit_after>
""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): """ bind_ip is the IP address assigned to a specific HW interface """ def __init__(self, universe=1, network_segment=1, bind_ip=None): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if bind_ip: self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(bind_ip)) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568))
""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): def __init__(self, universe=1, network_segment=1): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568)) Allow a specific address to be specified for sending""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): """ bind_ip is the IP address assigned to a specific HW interface """ def __init__(self, universe=1, network_segment=1, bind_ip=None): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if bind_ip: self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(bind_ip)) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568))
<commit_before>""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): def __init__(self, universe=1, network_segment=1): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568)) <commit_msg>Allow a specific address to be specified for sending<commit_after>""" Client/Source Generates and sends E1.31 packets over UDP """ import socket import struct from packet import E131Packet def ip_from_universe(universe): # derive multicast IP address from Universe high_byte = (universe >> 8) & 0xff low_byte = universe & 0xff return "239.255.{}.{}".format(high_byte, low_byte) class DMXSource(object): """ bind_ip is the IP address assigned to a specific HW interface """ def __init__(self, universe=1, network_segment=1, bind_ip=None): self.universe = universe self.ip = ip_from_universe(universe) # open UDP socket self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if bind_ip: self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(bind_ip)) # set ttl to limit network segment reach ttl = struct.pack('b', network_segment) self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) def send_data(self, data): packet = E131Packet(universe=self.universe, data=data) self.sock.sendto(packet.packet_data, (self.ip, 5568))
f9eac3523d4ab72d3abbfa8ee57801466552f18a
speedbar/modules/hostinformation.py
speedbar/modules/hostinformation.py
from __future__ import absolute_import from .base import BaseModule import os class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': os.uname()[1]} def init(): return HostInformationModule
import socket from .base import BaseModule class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': socket.gethostname()} def init(): return HostInformationModule
Use more portable function to get hostname
Use more portable function to get hostname This addresses #11
Python
mit
mixcloud/django-speedbar,theospears/django-speedbar,theospears/django-speedbar,mixcloud/django-speedbar,mixcloud/django-speedbar,theospears/django-speedbar
from __future__ import absolute_import from .base import BaseModule import os class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': os.uname()[1]} def init(): return HostInformationModule Use more portable function to get hostname This addresses #11
import socket from .base import BaseModule class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': socket.gethostname()} def init(): return HostInformationModule
<commit_before>from __future__ import absolute_import from .base import BaseModule import os class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': os.uname()[1]} def init(): return HostInformationModule <commit_msg>Use more portable function to get hostname This addresses #11<commit_after>
import socket from .base import BaseModule class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': socket.gethostname()} def init(): return HostInformationModule
from __future__ import absolute_import from .base import BaseModule import os class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': os.uname()[1]} def init(): return HostInformationModule Use more portable function to get hostname This addresses #11import socket from .base import BaseModule class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': socket.gethostname()} def init(): return HostInformationModule
<commit_before>from __future__ import absolute_import from .base import BaseModule import os class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': os.uname()[1]} def init(): return HostInformationModule <commit_msg>Use more portable function to get hostname This addresses #11<commit_after>import socket from .base import BaseModule class HostInformationModule(BaseModule): key = 'host' def get_metrics(self): return {'name': socket.gethostname()} def init(): return HostInformationModule
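The portability point behind this record is easy to verify: os.uname() does not exist on Windows, while socket.gethostname() is available on every platform. A two-line check, with platform.node() shown as a third portable option for comparison:

import platform
import socket

print(socket.gethostname())  # portable: works on POSIX and Windows
print(platform.node())       # equivalent portable alternative
# os.uname()[1] would raise AttributeError on Windows, hence the change.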
025da22574df8423bfdfea2f7b5bded5ab55054f
manage.py
manage.py
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QuesCheetah.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
#!/usr/bin/env python import os import sys if __name__ == "__main__": from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
DELETE - delete default setting
DELETE - delete default setting
Python
mit
mingkim/QuesCheetah,mingkim/QuesCheetah,mingkim/QuesCheetah,mingkim/QuesCheetah
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QuesCheetah.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) DELETE - delete default setting
#!/usr/bin/env python import os import sys if __name__ == "__main__": from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
<commit_before>#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QuesCheetah.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) <commit_msg>DELETE - delete default setting<commit_after>
#!/usr/bin/env python import os import sys if __name__ == "__main__": from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QuesCheetah.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) DELETE - delete default setting#!/usr/bin/env python import os import sys if __name__ == "__main__": from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
<commit_before>#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QuesCheetah.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) <commit_msg>DELETE - delete default setting<commit_after>#!/usr/bin/env python import os import sys if __name__ == "__main__": from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
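Dropping the os.environ.setdefault call means this manage.py no longer falls back to a bundled settings module, so DJANGO_SETTINGS_MODULE must come from the environment. A sketch of making that requirement explicit instead of failing later inside Django — 'myproject.settings' is a placeholder path:

import os
import sys

if __name__ == '__main__':
    if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        sys.exit('Set DJANGO_SETTINGS_MODULE (e.g. myproject.settings) '
                 'before running management commands.')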
8623aae8778307648e4a0380d84ca7dc7a63f3f2
oneflow/core/context_processors.py
oneflow/core/context_processors.py
# -*- coding: utf-8 -*- from .models.nonrel import User def mongodb_user(request): if request.user.is_anonymous(): return {u'mongodb_user': None} try: mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id']) except KeyError: mongodb_user = User.objects.get(django_user=request.user.id) # Cache it for next time. request.session[u'mongodb_user_id'] = mongodb_user.id return {u'mongodb_user': mongodb_user}
# -*- coding: utf-8 -*- def mongodb_user(request): """ not the most usefull context manager in the world. """ if request.user.is_anonymous(): return {u'mongodb_user': None} return {u'mongodb_user': request.user.mongo}
Simplify the context processor. Not very useful anymore, in fact.
Simplify the context processor. Not very useful anymore, in fact.
Python
agpl-3.0
1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow
# -*- coding: utf-8 -*- from .models.nonrel import User def mongodb_user(request): if request.user.is_anonymous(): return {u'mongodb_user': None} try: mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id']) except KeyError: mongodb_user = User.objects.get(django_user=request.user.id) # Cache it for next time. request.session[u'mongodb_user_id'] = mongodb_user.id return {u'mongodb_user': mongodb_user} Simplify the context processor. Not very useful anymore, in fact.
# -*- coding: utf-8 -*- def mongodb_user(request): """ not the most usefull context manager in the world. """ if request.user.is_anonymous(): return {u'mongodb_user': None} return {u'mongodb_user': request.user.mongo}
<commit_before># -*- coding: utf-8 -*- from .models.nonrel import User def mongodb_user(request): if request.user.is_anonymous(): return {u'mongodb_user': None} try: mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id']) except KeyError: mongodb_user = User.objects.get(django_user=request.user.id) # Cache it for next time. request.session[u'mongodb_user_id'] = mongodb_user.id return {u'mongodb_user': mongodb_user} <commit_msg>Simplify the context processor. Not very useful anymore, in fact.<commit_after>
# -*- coding: utf-8 -*- def mongodb_user(request): """ not the most usefull context manager in the world. """ if request.user.is_anonymous(): return {u'mongodb_user': None} return {u'mongodb_user': request.user.mongo}
# -*- coding: utf-8 -*- from .models.nonrel import User def mongodb_user(request): if request.user.is_anonymous(): return {u'mongodb_user': None} try: mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id']) except KeyError: mongodb_user = User.objects.get(django_user=request.user.id) # Cache it for next time. request.session[u'mongodb_user_id'] = mongodb_user.id return {u'mongodb_user': mongodb_user} Simplify the context processor. Not very useful anymore, in fact.# -*- coding: utf-8 -*- def mongodb_user(request): """ not the most usefull context manager in the world. """ if request.user.is_anonymous(): return {u'mongodb_user': None} return {u'mongodb_user': request.user.mongo}
<commit_before># -*- coding: utf-8 -*- from .models.nonrel import User def mongodb_user(request): if request.user.is_anonymous(): return {u'mongodb_user': None} try: mongodb_user = User.objects.get(id=request.session[u'mongodb_user_id']) except KeyError: mongodb_user = User.objects.get(django_user=request.user.id) # Cache it for next time. request.session[u'mongodb_user_id'] = mongodb_user.id return {u'mongodb_user': mongodb_user} <commit_msg>Simplify the context processor. Not very useful anymore, in fact.<commit_after># -*- coding: utf-8 -*- def mongodb_user(request): """ not the most usefull context manager in the world. """ if request.user.is_anonymous(): return {u'mongodb_user': None} return {u'mongodb_user': request.user.mongo}
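A Django context processor is just a callable that takes the request and returns a dict merged into every template context, which is why the simplified version in this record can be so short. A framework-free sketch of the same shape, exercised with fake objects — the mongo attribute mirrors the record above and is otherwise an assumption:

def mongodb_user(request):
    # Same contract as a Django context processor: request in, dict out.
    if request.user.is_anonymous():
        return {'mongodb_user': None}
    return {'mongodb_user': request.user.mongo}


class _FakeUser(object):
    mongo = 'mongo-profile'

    def is_anonymous(self):
        return False


class _FakeRequest(object):
    user = _FakeUser()


print(mongodb_user(_FakeRequest()))  # {'mongodb_user': 'mongo-profile'}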
58270d88592e6a097763ce0052ef6a8d22e9bbcb
compose/const.py
compose/const.py
import os import sys DEFAULT_TIMEOUT = 10 IS_WINDOWS_PLATFORM = (sys.platform == "win32") LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number' LABEL_ONE_OFF = 'com.docker.compose.oneoff' LABEL_PROJECT = 'com.docker.compose.project' LABEL_SERVICE = 'com.docker.compose.service' LABEL_VERSION = 'com.docker.compose.version' LABEL_CONFIG_HASH = 'com.docker.compose.config-hash' HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))) IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
import os import sys DEFAULT_TIMEOUT = 10 HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))) IS_WINDOWS_PLATFORM = (sys.platform == "win32") LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number' LABEL_ONE_OFF = 'com.docker.compose.oneoff' LABEL_PROJECT = 'com.docker.compose.project' LABEL_SERVICE = 'com.docker.compose.service' LABEL_VERSION = 'com.docker.compose.version' LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
Remove duplicate and re-order alphabetically
Remove duplicate and re-order alphabetically Signed-off-by: Mazz Mosley <a54aae760072825ca6733a7dfc4aa39211f100a9@houseofmnowster.com>
Python
apache-2.0
johnstep/docker.github.io,mdaue/compose,danix800/docker.github.io,phiroict/docker,jeanpralo/compose,jzwlqx/denverdino.github.io,jonaseck2/compose,denverdino/compose,docker/docker.github.io,JimGalasyn/docker.github.io,LuisBosquez/docker.github.io,thaJeztah/docker.github.io,aduermael/docker.github.io,londoncalling/docker.github.io,GM-Alex/compose,JimGalasyn/docker.github.io,denverdino/docker.github.io,joeuo/docker.github.io,alexandrev/compose,ChrisChinchilla/compose,sanscontext/docker.github.io,thaJeztah/docker.github.io,johnstep/docker.github.io,londoncalling/docker.github.io,viranch/compose,michael-k/docker-compose,troy0820/docker.github.io,gdevillele/docker.github.io,danix800/docker.github.io,KevinGreene/compose,phiroict/docker,anweiss/docker.github.io,joaofnfernandes/docker.github.io,funkyfuture/docker-compose,charleswhchan/compose,vdemeester/compose,menglingwei/denverdino.github.io,bdwill/docker.github.io,kojiromike/compose,vdemeester/compose,funkyfuture/docker-compose,thaJeztah/docker.github.io,johnstep/docker.github.io,jeanpralo/compose,tiry/compose,jrabbit/compose,aduermael/docker.github.io,TomasTomecek/compose,charleswhchan/compose,albers/compose,KalleDK/compose,docker-zh/docker.github.io,shin-/docker.github.io,schmunk42/compose,jonaseck2/compose,denverdino/docker.github.io,TomasTomecek/compose,shin-/compose,j-fuentes/compose,dbdd4us/compose,anweiss/docker.github.io,troy0820/docker.github.io,shubheksha/docker.github.io,joeuo/docker.github.io,mdaue/compose,docker-zh/docker.github.io,BSWANG/denverdino.github.io,docker/docker.github.io,joaofnfernandes/docker.github.io,thaJeztah/docker.github.io,ChrisChinchilla/compose,denverdino/denverdino.github.io,thaJeztah/compose,jrabbit/compose,troy0820/docker.github.io,londoncalling/docker.github.io,sdurrheimer/compose,menglingwei/denverdino.github.io,kojiromike/compose,alexandrev/compose,jzwlqx/denverdino.github.io,denverdino/denverdino.github.io,johnstep/docker.github.io,phiroict/docker,LuisBosquez/docker.github.io,londoncalling/docker.github.io,twitherspoon/compose,tiry/compose,rillig/docker.github.io,au-phiware/compose,JimGalasyn/docker.github.io,BSWANG/denverdino.github.io,gdevillele/docker.github.io,menglingwei/denverdino.github.io,swoopla/compose,bdwill/docker.github.io,LuisBosquez/docker.github.io,joaofnfernandes/docker.github.io,rillig/docker.github.io,moxiegirl/compose,hoogenm/compose,alexisbellido/docker.github.io,swoopla/compose,BSWANG/denverdino.github.io,KevinGreene/compose,denverdino/docker.github.io,danix800/docker.github.io,LuisBosquez/docker.github.io,sanscontext/docker.github.io,denverdino/docker.github.io,phiroict/docker,alexisbellido/docker.github.io,gdevillele/docker.github.io,joaofnfernandes/docker.github.io,michael-k/docker-compose,joaofnfernandes/docker.github.io,troy0820/docker.github.io,rillig/docker.github.io,shin-/docker.github.io,danix800/docker.github.io,aduermael/docker.github.io,docker/docker.github.io,shubheksha/docker.github.io,BSWANG/denverdino.github.io,docker-zh/docker.github.io,bdwill/docker.github.io,mnowster/compose,gdevillele/docker.github.io,menglingwei/denverdino.github.io,dnephin/compose,LuisBosquez/docker.github.io,bdwill/docker.github.io,hoogenm/compose,KalleDK/compose,GM-Alex/compose,joeuo/docker.github.io,au-phiware/compose,schmunk42/compose,joeuo/docker.github.io,thaJeztah/docker.github.io,sdurrheimer/compose,shin-/docker.github.io,twitherspoon/compose,mnowster/compose,j-fuentes/compose,viranch/compose,shin-/docker.github.io,moxiegirl/compose,docker-zh/docker.github.io,rillig/docker.github.io,mrfuxi/compose,anweiss/docker.github.io,londoncalling/docker.github.io,dbdd4us/compose,sanscontext/docker.github.io,jzwlqx/denverdino.github.io,shin-/compose,alexisbellido/docker.github.io,docker-zh/docker.github.io,dnephin/compose,alexisbellido/docker.github.io,denverdino/denverdino.github.io,sanscontext/docker.github.io,menglingwei/denverdino.github.io,sanscontext/docker.github.io,shubheksha/docker.github.io,anweiss/docker.github.io,mrfuxi/compose,thaJeztah/compose,shubheksha/docker.github.io,shin-/docker.github.io,johnstep/docker.github.io,anweiss/docker.github.io,docker/docker.github.io,aduermael/docker.github.io,joeuo/docker.github.io,shubheksha/docker.github.io,JimGalasyn/docker.github.io,phiroict/docker,docker/docker.github.io,gdevillele/docker.github.io,rgbkrk/compose,denverdino/denverdino.github.io,bdwill/docker.github.io,denverdino/docker.github.io,andrewgee/compose,jzwlqx/denverdino.github.io
import os import sys DEFAULT_TIMEOUT = 10 IS_WINDOWS_PLATFORM = (sys.platform == "win32") LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number' LABEL_ONE_OFF = 'com.docker.compose.oneoff' LABEL_PROJECT = 'com.docker.compose.project' LABEL_SERVICE = 'com.docker.compose.service' LABEL_VERSION = 'com.docker.compose.version' LABEL_CONFIG_HASH = 'com.docker.compose.config-hash' HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))) IS_WINDOWS_PLATFORM = (sys.platform == 'win32') Remove duplicate and re-order alphabetically Signed-off-by: Mazz Mosley <a54aae760072825ca6733a7dfc4aa39211f100a9@houseofmnowster.com>
import os import sys DEFAULT_TIMEOUT = 10 HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))) IS_WINDOWS_PLATFORM = (sys.platform == "win32") LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number' LABEL_ONE_OFF = 'com.docker.compose.oneoff' LABEL_PROJECT = 'com.docker.compose.project' LABEL_SERVICE = 'com.docker.compose.service' LABEL_VERSION = 'com.docker.compose.version' LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
<commit_before>import os import sys DEFAULT_TIMEOUT = 10 IS_WINDOWS_PLATFORM = (sys.platform == "win32") LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number' LABEL_ONE_OFF = 'com.docker.compose.oneoff' LABEL_PROJECT = 'com.docker.compose.project' LABEL_SERVICE = 'com.docker.compose.service' LABEL_VERSION = 'com.docker.compose.version' LABEL_CONFIG_HASH = 'com.docker.compose.config-hash' HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))) IS_WINDOWS_PLATFORM = (sys.platform == 'win32') <commit_msg>Remove duplicate and re-order alphabetically Signed-off-by: Mazz Mosley <a54aae760072825ca6733a7dfc4aa39211f100a9@houseofmnowster.com><commit_after>
import os
import sys

DEFAULT_TIMEOUT = 10
HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)))
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'
LABEL_SERVICE = 'com.docker.compose.service'
LABEL_VERSION = 'com.docker.compose.version'
LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
import os
import sys

DEFAULT_TIMEOUT = 10
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'
LABEL_SERVICE = 'com.docker.compose.service'
LABEL_VERSION = 'com.docker.compose.version'
LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)))

IS_WINDOWS_PLATFORM = (sys.platform == 'win32')

Remove duplicate and re-order alphabetically

Signed-off-by: Mazz Mosley <a54aae760072825ca6733a7dfc4aa39211f100a9@houseofmnowster.com>import os
import sys

DEFAULT_TIMEOUT = 10
HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)))
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'
LABEL_SERVICE = 'com.docker.compose.service'
LABEL_VERSION = 'com.docker.compose.version'
LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
<commit_before>import os
import sys

DEFAULT_TIMEOUT = 10
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'
LABEL_SERVICE = 'com.docker.compose.service'
LABEL_VERSION = 'com.docker.compose.version'
LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)))

IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
<commit_msg>Remove duplicate and re-order alphabetically

Signed-off-by: Mazz Mosley <a54aae760072825ca6733a7dfc4aa39211f100a9@houseofmnowster.com><commit_after>import os
import sys

DEFAULT_TIMEOUT = 10
HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)))
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'
LABEL_SERVICE = 'com.docker.compose.service'
LABEL_VERSION = 'com.docker.compose.version'
LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
b0701b50bb5d3dd3a7255ef4cf205f75513d790e
froide/helper/email_sending.py
froide/helper/email_sending.py
from django.core.mail import EmailMessage, get_connection
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None,
              attachments=None, fail_silently=False,
              bounce_check=True, auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    email = EmailMessage(subject, body, from_email, [user_email],
                         connection=connection)
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
from django.core.mail import (
    EmailMessage, EmailMultiAlternatives, get_connection
)
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None, html=None,
              attachments=None, fail_silently=False,
              bounce_check=True, headers=None,
              auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    if html is None:
        email_klass = EmailMessage
    else:
        email_klass = EmailMultiAlternatives
    email = email_klass(subject, body, from_email, [user_email],
                        connection=connection, headers=headers)
    if html is not None:
        email.attach_alternative(
            html, "text/html"
        )
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
Add support for html emails and extra headers
Add support for html emails and extra headers
Python
mit
fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide
from django.core.mail import EmailMessage, get_connection
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None,
              attachments=None, fail_silently=False,
              bounce_check=True, auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    email = EmailMessage(subject, body, from_email, [user_email],
                         connection=connection)
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)

Add support for html emails and extra headers
from django.core.mail import (
    EmailMessage, EmailMultiAlternatives, get_connection
)
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None, html=None,
              attachments=None, fail_silently=False,
              bounce_check=True, headers=None,
              auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    if html is None:
        email_klass = EmailMessage
    else:
        email_klass = EmailMultiAlternatives
    email = email_klass(subject, body, from_email, [user_email],
                        connection=connection, headers=headers)
    if html is not None:
        email.attach_alternative(
            html, "text/html"
        )
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
<commit_before>from django.core.mail import EmailMessage, get_connection
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None,
              attachments=None, fail_silently=False,
              bounce_check=True, auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    email = EmailMessage(subject, body, from_email, [user_email],
                         connection=connection)
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
<commit_msg>Add support for html emails and extra headers<commit_after>
from django.core.mail import (
    EmailMessage, EmailMultiAlternatives, get_connection
)
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None, html=None,
              attachments=None, fail_silently=False,
              bounce_check=True, headers=None,
              auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    if html is None:
        email_klass = EmailMessage
    else:
        email_klass = EmailMultiAlternatives
    email = email_klass(subject, body, from_email, [user_email],
                        connection=connection, headers=headers)
    if html is not None:
        email.attach_alternative(
            html, "text/html"
        )
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
from django.core.mail import EmailMessage, get_connection
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None,
              attachments=None, fail_silently=False,
              bounce_check=True, auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    email = EmailMessage(subject, body, from_email, [user_email],
                         connection=connection)
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)

Add support for html emails and extra headersfrom django.core.mail import (
    EmailMessage, EmailMultiAlternatives, get_connection
)
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None, html=None,
              attachments=None, fail_silently=False,
              bounce_check=True, headers=None,
              auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    if html is None:
        email_klass = EmailMessage
    else:
        email_klass = EmailMultiAlternatives
    email = email_klass(subject, body, from_email, [user_email],
                        connection=connection, headers=headers)
    if html is not None:
        email.attach_alternative(
            html, "text/html"
        )
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
<commit_before>from django.core.mail import EmailMessage, get_connection
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None,
              attachments=None, fail_silently=False,
              bounce_check=True, auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    email = EmailMessage(subject, body, from_email, [user_email],
                         connection=connection)
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
<commit_msg>Add support for html emails and extra headers<commit_after>from django.core.mail import (
    EmailMessage, EmailMultiAlternatives, get_connection
)
from django.conf import settings

try:
    from froide.bounce.utils import make_bounce_address
except ImportError:
    make_bounce_address = None


HANDLE_BOUNCES = settings.FROIDE_CONFIG['bounce_enabled']


def get_mail_connection(**kwargs):
    return get_connection(
        backend=settings.EMAIL_BACKEND,
        **kwargs
    )


def send_mail(subject, body, user_email, from_email=None, html=None,
              attachments=None, fail_silently=False,
              bounce_check=True, headers=None,
              auto_bounce=True, **kwargs):
    if not user_email:
        return
    if bounce_check:
        # TODO: Check if this email should be sent
        pass
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL

    backend_kwargs = {}
    if HANDLE_BOUNCES and auto_bounce and make_bounce_address:
        backend_kwargs['return_path'] = make_bounce_address(user_email)

    connection = get_mail_connection(**backend_kwargs)
    if html is None:
        email_klass = EmailMessage
    else:
        email_klass = EmailMultiAlternatives
    email = email_klass(subject, body, from_email, [user_email],
                        connection=connection, headers=headers)
    if html is not None:
        email.attach_alternative(
            html, "text/html"
        )
    if attachments is not None:
        for name, data, mime_type in attachments:
            email.attach(name, data, mime_type)
    return email.send(fail_silently=fail_silently)
ea947950d2ee8c6bd9f7693d977f0abfa1410548
migrations/002_add_month_start.py
migrations/002_add_month_start.py
""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) record = Record(document) collection.save(record.to_mongo())
""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') record = Record(document) collection.save(record.to_mongo())
Fix migrations 002 for monthly grouping
Fix migrations 002 for monthly grouping

@gtrogers
Python
mit
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) record = Record(document) collection.save(record.to_mongo()) Fix migrations 002 for monthly grouping @gtrogers
""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') record = Record(document) collection.save(record.to_mongo())
<commit_before>""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) record = Record(document) collection.save(record.to_mongo()) <commit_msg>Fix migrations 002 for monthly grouping @gtrogers<commit_after>
""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') record = Record(document) collection.save(record.to_mongo())
""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) record = Record(document) collection.save(record.to_mongo()) Fix migrations 002 for monthly grouping @gtrogers""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') record = Record(document) collection.save(record.to_mongo())
<commit_before>""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) record = Record(document) collection.save(record.to_mongo()) <commit_msg>Fix migrations 002 for monthly grouping @gtrogers<commit_after>""" Add _week_start_at field to all documents in all collections """ from backdrop.core.bucket import utc from backdrop.core.records import Record import logging log = logging.getLogger(__name__) def up(db): for name in db.collection_names(): log.info("Migrating collection: {0}".format(name)) collection = db[name] query = { "_timestamp": {"$exists": True}, "_month_start_at": {"$exists": False} } for document in collection.find(query): document['_timestamp'] = utc(document['_timestamp']) if '_week_start_at' in document: document.pop('_week_start_at') record = Record(document) collection.save(record.to_mongo())
796ac91df7dd4c63e76aa60b8eeec3d12354ecc7
node/sort.py
node/sort.py
#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a))]
#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2,3]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a+1))]
Change 1 based range so it counts up to n
Change 1 based range so it counts up to n
Python
mit
muddyfish/PYKE,muddyfish/PYKE
#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a))]

Change 1 based range so it counts up to n
#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2,3]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a+1))]
<commit_before>#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a))]
<commit_msg>Change 1 based range so it counts up to n<commit_after>
#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2,3]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a+1))]
#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a))]

Change 1 based range so it counts up to n#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2,3]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a+1))]
<commit_before>#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a))]
<commit_msg>Change 1 based range so it counts up to n<commit_after>#!/usr/bin/env python
from nodes import Node


class Sort(Node):
    char = "S"
    args = 1
    results = 1

    @Node.test_func([[2,3,4,1]], [[1,2,3,4]])
    @Node.test_func(["test"], ["estt"])
    def func(self, a: Node.indexable):
        """sorted(a) - returns the same type as given"""
        if isinstance(a, tuple):
            return [tuple(sorted(a))]
        if isinstance(a, str):
            return "".join(sorted(a))
        return [sorted(a)]

    @Node.test_func([3], [[1,2,3]])
    def one_range(self, a:int):
        """range(1,a)"""
        return [list(range(1,a+1))]
a86c7d9be7b7399b117b1289d6548f50b657efe6
openstack_dashboard/dashboards/project/stacks/resource_types/tables.py
openstack_dashboard/dashboards/project/stacks/resource_types/tables.py
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    class ResourceColumn(tables.Column):
        def get_raw_data(self, datum):
            attr_list = ['implementation', 'component', 'resource']
            info_list = datum.resource_type.split('::')
            info_list[0] = info_list[0].replace("OS", "OpenStack")
            if info_list[0] == "AWS":
                info_list[0] = _("AWS compatible")
            info_dict = dict(zip(attr_list, info_list))
            return info_dict[self.transform]

    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)
    implementation = ResourceColumn("implementation",
                                    verbose_name=_("Implementation"),)
    component = ResourceColumn("component",
                               verbose_name=_("Component"),)
    resource = ResourceColumn("resource",
                              verbose_name=_("Resource"),)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
Remove Orchestration Resource Types names restriction
Remove Orchestration Resource Types names restriction

The additional columns "Implementation", "Component" and "Resource" are representative for a limited resource type group only. Resource type name can have less or more than three words and Heat even allows to specify a URL as a resource type. Horizon should not use these columns at all: "Type" column and filter will do just the same trick.

Change-Id: I38a671490b90122e2d75e6aa11d3de0fa12817c9
Closes-Bug: #1614000
Python
apache-2.0
yeming233/horizon,openstack/horizon,bac/horizon,BiznetGIO/horizon,noironetworks/horizon,coreycb/horizon,sandvine/horizon,noironetworks/horizon,yeming233/horizon,ChameleonCloud/horizon,openstack/horizon,openstack/horizon,sandvine/horizon,openstack/horizon,NeCTAR-RC/horizon,yeming233/horizon,bac/horizon,sandvine/horizon,sandvine/horizon,ChameleonCloud/horizon,coreycb/horizon,BiznetGIO/horizon,BiznetGIO/horizon,BiznetGIO/horizon,ChameleonCloud/horizon,noironetworks/horizon,bac/horizon,coreycb/horizon,yeming233/horizon,coreycb/horizon,NeCTAR-RC/horizon,noironetworks/horizon,ChameleonCloud/horizon,NeCTAR-RC/horizon,bac/horizon,NeCTAR-RC/horizon
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    class ResourceColumn(tables.Column):
        def get_raw_data(self, datum):
            attr_list = ['implementation', 'component', 'resource']
            info_list = datum.resource_type.split('::')
            info_list[0] = info_list[0].replace("OS", "OpenStack")
            if info_list[0] == "AWS":
                info_list[0] = _("AWS compatible")
            info_dict = dict(zip(attr_list, info_list))
            return info_dict[self.transform]

    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)
    implementation = ResourceColumn("implementation",
                                    verbose_name=_("Implementation"),)
    component = ResourceColumn("component",
                               verbose_name=_("Component"),)
    resource = ResourceColumn("resource",
                              verbose_name=_("Resource"),)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False

Remove Orchestration Resource Types names restriction

The additional columns "Implementation", "Component" and "Resource" are representative for a limited resource type group only. Resource type name can have less or more than three words and Heat even allows to specify a URL as a resource type. Horizon should not use these columns at all: "Type" column and filter will do just the same trick.

Change-Id: I38a671490b90122e2d75e6aa11d3de0fa12817c9
Closes-Bug: #1614000
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    class ResourceColumn(tables.Column):
        def get_raw_data(self, datum):
            attr_list = ['implementation', 'component', 'resource']
            info_list = datum.resource_type.split('::')
            info_list[0] = info_list[0].replace("OS", "OpenStack")
            if info_list[0] == "AWS":
                info_list[0] = _("AWS compatible")
            info_dict = dict(zip(attr_list, info_list))
            return info_dict[self.transform]

    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)
    implementation = ResourceColumn("implementation",
                                    verbose_name=_("Implementation"),)
    component = ResourceColumn("component",
                               verbose_name=_("Component"),)
    resource = ResourceColumn("resource",
                              verbose_name=_("Resource"),)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
<commit_msg>Remove Orchestration Resource Types names restriction

The additional columns "Implementation", "Component" and "Resource" are representative for a limited resource type group only. Resource type name can have less or more than three words and Heat even allows to specify a URL as a resource type. Horizon should not use these columns at all: "Type" column and filter will do just the same trick.

Change-Id: I38a671490b90122e2d75e6aa11d3de0fa12817c9
Closes-Bug: #1614000<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    class ResourceColumn(tables.Column):
        def get_raw_data(self, datum):
            attr_list = ['implementation', 'component', 'resource']
            info_list = datum.resource_type.split('::')
            info_list[0] = info_list[0].replace("OS", "OpenStack")
            if info_list[0] == "AWS":
                info_list[0] = _("AWS compatible")
            info_dict = dict(zip(attr_list, info_list))
            return info_dict[self.transform]

    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)
    implementation = ResourceColumn("implementation",
                                    verbose_name=_("Implementation"),)
    component = ResourceColumn("component",
                               verbose_name=_("Component"),)
    resource = ResourceColumn("resource",
                              verbose_name=_("Resource"),)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False

Remove Orchestration Resource Types names restriction

The additional columns "Implementation", "Component" and "Resource" are representative for a limited resource type group only. Resource type name can have less or more than three words and Heat even allows to specify a URL as a resource type. Horizon should not use these columns at all: "Type" column and filter will do just the same trick.

Change-Id: I38a671490b90122e2d75e6aa11d3de0fa12817c9
Closes-Bug: #1614000# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    class ResourceColumn(tables.Column):
        def get_raw_data(self, datum):
            attr_list = ['implementation', 'component', 'resource']
            info_list = datum.resource_type.split('::')
            info_list[0] = info_list[0].replace("OS", "OpenStack")
            if info_list[0] == "AWS":
                info_list[0] = _("AWS compatible")
            info_dict = dict(zip(attr_list, info_list))
            return info_dict[self.transform]

    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)
    implementation = ResourceColumn("implementation",
                                    verbose_name=_("Implementation"),)
    component = ResourceColumn("component",
                               verbose_name=_("Component"),)
    resource = ResourceColumn("resource",
                              verbose_name=_("Resource"),)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
<commit_msg>Remove Orchestration Resource Types names restriction

The additional columns "Implementation", "Component" and "Resource" are representative for a limited resource type group only. Resource type name can have less or more than three words and Heat even allows to specify a URL as a resource type. Horizon should not use these columns at all: "Type" column and filter will do just the same trick.

Change-Id: I38a671490b90122e2d75e6aa11d3de0fa12817c9
Closes-Bug: #1614000<commit_after># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.utils.translation import ugettext_lazy as _

from horizon import tables


class ResourceTypesTable(tables.DataTable):
    name = tables.Column("resource_type",
                         verbose_name=_("Type"),
                         link="horizon:project:stacks.resource_types:details",)

    def get_object_id(self, resource):
        return resource.resource_type

    class Meta(object):
        name = "resource_types"
        verbose_name = _("Resource Types")
        table_actions = (tables.FilterAction,)
        multi_select = False
4c39b8691762596c13cf197305c05fde5d4c3b5f
app.py
app.py
import sys

from flask import Flask, render_template, jsonify, request
from digitalocean import SSHKey, Manager

app = Flask(__name__)

manager = Manager(token="24611cca29682d3d54f8208b67a47dbe8b6ea01b2c8103ba61150ece4b6259b6")
my_droplets = manager.get_all_droplets()

# Check for success
print(my_droplets)

# Get from Chrome extension
user_ssh_key = ''

key = SSHKey(token='secretspecialuniquesnowflake',
             name='uniquehostname',
             public_key=user_ssh_key)

# key is created succesfully.
key.create()

# Create Droplet
keys = manager.get_all_sshkeys()

droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                               name='DropletWithSSHKeys',
                               region='ams3',  # Amster
                               image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                               size_slug='512mb',  # 512MB
                               ssh_keys=keys,  #Automatic conversion
                               backups=False)
droplet.create()

# Instantiate ``api`` object to setup authentication for DO API.


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())
import sys

from flask import Flask, render_template, jsonify, request
import subprocess
from digitalocean import SSHKey, Manager

app = Flask(__name__)


@app.route('/login')
def login():
    # Get from Chrome extension
    token = request.args.get('token')
    manager = Manager(token=token)
    # Instantiate ``api`` object to setup authentication for DO API.

    my_droplets = manager.get_all_droplets()

    # Check for success
    print(my_droplets)

    user_ssh_key = request.args.get('ssh')

    key = SSHKey(token='secretspecialuniquesnowflake',
                 name='uniquehostname',
                 public_key=user_ssh_key)

    # key is created succesfully.
    key.create()


@app.route('/create')
def create():
    # Create Droplet
    keys = manager.get_all_sshkeys()

    droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                                   name='DropletWithSSHKeys',
                                   region='ams3',  # Amster
                                   image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                                   size_slug='512mb',  # 512MB
                                   ssh_keys=keys,  #Automatic conversion
                                   backups=False)
    droplet.create()


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())
Create /login, /create routes for API
Create /login, /create routes for API
Python
mit
CapsLockHacks/do-server
import sys

from flask import Flask, render_template, jsonify, request
from digitalocean import SSHKey, Manager

app = Flask(__name__)

manager = Manager(token="24611cca29682d3d54f8208b67a47dbe8b6ea01b2c8103ba61150ece4b6259b6")
my_droplets = manager.get_all_droplets()

# Check for success
print(my_droplets)

# Get from Chrome extension
user_ssh_key = ''

key = SSHKey(token='secretspecialuniquesnowflake',
             name='uniquehostname',
             public_key=user_ssh_key)

# key is created succesfully.
key.create()

# Create Droplet
keys = manager.get_all_sshkeys()

droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                               name='DropletWithSSHKeys',
                               region='ams3',  # Amster
                               image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                               size_slug='512mb',  # 512MB
                               ssh_keys=keys,  #Automatic conversion
                               backups=False)
droplet.create()

# Instantiate ``api`` object to setup authentication for DO API.


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())Create /login, /create routes for API
import sys

from flask import Flask, render_template, jsonify, request
import subprocess
from digitalocean import SSHKey, Manager

app = Flask(__name__)


@app.route('/login')
def login():
    # Get from Chrome extension
    token = request.args.get('token')
    manager = Manager(token=token)
    # Instantiate ``api`` object to setup authentication for DO API.

    my_droplets = manager.get_all_droplets()

    # Check for success
    print(my_droplets)

    user_ssh_key = request.args.get('ssh')

    key = SSHKey(token='secretspecialuniquesnowflake',
                 name='uniquehostname',
                 public_key=user_ssh_key)

    # key is created succesfully.
    key.create()


@app.route('/create')
def create():
    # Create Droplet
    keys = manager.get_all_sshkeys()

    droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                                   name='DropletWithSSHKeys',
                                   region='ams3',  # Amster
                                   image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                                   size_slug='512mb',  # 512MB
                                   ssh_keys=keys,  #Automatic conversion
                                   backups=False)
    droplet.create()


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())
<commit_before>import sys

from flask import Flask, render_template, jsonify, request
from digitalocean import SSHKey, Manager

app = Flask(__name__)

manager = Manager(token="24611cca29682d3d54f8208b67a47dbe8b6ea01b2c8103ba61150ece4b6259b6")
my_droplets = manager.get_all_droplets()

# Check for success
print(my_droplets)

# Get from Chrome extension
user_ssh_key = ''

key = SSHKey(token='secretspecialuniquesnowflake',
             name='uniquehostname',
             public_key=user_ssh_key)

# key is created succesfully.
key.create()

# Create Droplet
keys = manager.get_all_sshkeys()

droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                               name='DropletWithSSHKeys',
                               region='ams3',  # Amster
                               image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                               size_slug='512mb',  # 512MB
                               ssh_keys=keys,  #Automatic conversion
                               backups=False)
droplet.create()

# Instantiate ``api`` object to setup authentication for DO API.


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())<commit_msg>Create /login, /create routes for API<commit_after>
import sys

from flask import Flask, render_template, jsonify, request
import subprocess
from digitalocean import SSHKey, Manager

app = Flask(__name__)


@app.route('/login')
def login():
    # Get from Chrome extension
    token = request.args.get('token')
    manager = Manager(token=token)
    # Instantiate ``api`` object to setup authentication for DO API.

    my_droplets = manager.get_all_droplets()

    # Check for success
    print(my_droplets)

    user_ssh_key = request.args.get('ssh')

    key = SSHKey(token='secretspecialuniquesnowflake',
                 name='uniquehostname',
                 public_key=user_ssh_key)

    # key is created succesfully.
    key.create()


@app.route('/create')
def create():
    # Create Droplet
    keys = manager.get_all_sshkeys()

    droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                                   name='DropletWithSSHKeys',
                                   region='ams3',  # Amster
                                   image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                                   size_slug='512mb',  # 512MB
                                   ssh_keys=keys,  #Automatic conversion
                                   backups=False)
    droplet.create()


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())
import sys

from flask import Flask, render_template, jsonify, request
from digitalocean import SSHKey, Manager

app = Flask(__name__)

manager = Manager(token="24611cca29682d3d54f8208b67a47dbe8b6ea01b2c8103ba61150ece4b6259b6")
my_droplets = manager.get_all_droplets()

# Check for success
print(my_droplets)

# Get from Chrome extension
user_ssh_key = ''

key = SSHKey(token='secretspecialuniquesnowflake',
             name='uniquehostname',
             public_key=user_ssh_key)

# key is created succesfully.
key.create()

# Create Droplet
keys = manager.get_all_sshkeys()

droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                               name='DropletWithSSHKeys',
                               region='ams3',  # Amster
                               image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                               size_slug='512mb',  # 512MB
                               ssh_keys=keys,  #Automatic conversion
                               backups=False)
droplet.create()

# Instantiate ``api`` object to setup authentication for DO API.


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())Create /login, /create routes for APIimport sys

from flask import Flask, render_template, jsonify, request
import subprocess
from digitalocean import SSHKey, Manager

app = Flask(__name__)


@app.route('/login')
def login():
    # Get from Chrome extension
    token = request.args.get('token')
    manager = Manager(token=token)
    # Instantiate ``api`` object to setup authentication for DO API.

    my_droplets = manager.get_all_droplets()

    # Check for success
    print(my_droplets)

    user_ssh_key = request.args.get('ssh')

    key = SSHKey(token='secretspecialuniquesnowflake',
                 name='uniquehostname',
                 public_key=user_ssh_key)

    # key is created succesfully.
    key.create()


@app.route('/create')
def create():
    # Create Droplet
    keys = manager.get_all_sshkeys()

    droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                                   name='DropletWithSSHKeys',
                                   region='ams3',  # Amster
                                   image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                                   size_slug='512mb',  # 512MB
                                   ssh_keys=keys,  #Automatic conversion
                                   backups=False)
    droplet.create()


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())
<commit_before>import sys

from flask import Flask, render_template, jsonify, request
from digitalocean import SSHKey, Manager

app = Flask(__name__)

manager = Manager(token="24611cca29682d3d54f8208b67a47dbe8b6ea01b2c8103ba61150ece4b6259b6")
my_droplets = manager.get_all_droplets()

# Check for success
print(my_droplets)

# Get from Chrome extension
user_ssh_key = ''

key = SSHKey(token='secretspecialuniquesnowflake',
             name='uniquehostname',
             public_key=user_ssh_key)

# key is created succesfully.
key.create()

# Create Droplet
keys = manager.get_all_sshkeys()

droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                               name='DropletWithSSHKeys',
                               region='ams3',  # Amster
                               image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                               size_slug='512mb',  # 512MB
                               ssh_keys=keys,  #Automatic conversion
                               backups=False)
droplet.create()

# Instantiate ``api`` object to setup authentication for DO API.


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())<commit_msg>Create /login, /create routes for API<commit_after>import sys

from flask import Flask, render_template, jsonify, request
import subprocess
from digitalocean import SSHKey, Manager

app = Flask(__name__)


@app.route('/login')
def login():
    # Get from Chrome extension
    token = request.args.get('token')
    manager = Manager(token=token)
    # Instantiate ``api`` object to setup authentication for DO API.

    my_droplets = manager.get_all_droplets()

    # Check for success
    print(my_droplets)

    user_ssh_key = request.args.get('ssh')

    key = SSHKey(token='secretspecialuniquesnowflake',
                 name='uniquehostname',
                 public_key=user_ssh_key)

    # key is created succesfully.
    key.create()


@app.route('/create')
def create():
    # Create Droplet
    keys = manager.get_all_sshkeys()

    droplet = digitalocean.Droplet(token="secretspecialuniquesnowflake",
                                   name='DropletWithSSHKeys',
                                   region='ams3',  # Amster
                                   image='ubuntu-14-04-x64',  # Ubuntu 14.04 x64
                                   size_slug='512mb',  # 512MB
                                   ssh_keys=keys,  #Automatic conversion
                                   backups=False)
    droplet.create()


@app.route('/')
def index():
    return 'Hello World'


def main():
    app.run(host="0.0.0.0", debug=True)


if __name__ == '__main__':
    sys.exit(main())
2a93ed05a95aad9a27362f24abc766d9d1fc19fe
tests/functional/preview_and_dev/test_email_auth.py
tests/functional/preview_and_dev/test_email_auth.py
from tests.test_utils import recordtime
from tests.pages.rollups import sign_in_email_auth


@recordtime
def test_email_auth(driver, profile, base_url):
    # login email auth user
    sign_in_email_auth(driver, profile)

    # assert url is research mode service's dashboard
    assert driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id)
from tests.test_utils import recordtime
from tests.pages.rollups import sign_in_email_auth


@recordtime
def test_email_auth(driver, profile, base_url):
    # login email auth user
    sign_in_email_auth(driver, profile)

    # assert url is research mode service's dashboard
    assert (
        driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id)
    ) or (
        driver.current_url == base_url + '/services/{}'.format(profile.notify_research_service_id)
    )
Update tests for new dashboard URL
Update tests for new dashboard URL

Includes both so we can migrate from one to the other. Currently blocking admin deploy.
Python
mit
alphagov/notifications-functional-tests,alphagov/notifications-functional-tests
from tests.test_utils import recordtime
from tests.pages.rollups import sign_in_email_auth


@recordtime
def test_email_auth(driver, profile, base_url):
    # login email auth user
    sign_in_email_auth(driver, profile)

    # assert url is research mode service's dashboard
    assert driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id)

Update tests for new dashboard URL

Includes both so we can migrate from one to the other. Currently blocking admin deploy.
from tests.test_utils import recordtime
from tests.pages.rollups import sign_in_email_auth


@recordtime
def test_email_auth(driver, profile, base_url):
    # login email auth user
    sign_in_email_auth(driver, profile)

    # assert url is research mode service's dashboard
    assert (
        driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id)
    ) or (
        driver.current_url == base_url + '/services/{}'.format(profile.notify_research_service_id)
    )
<commit_before>from tests.test_utils import recordtime
from tests.pages.rollups import sign_in_email_auth


@recordtime
def test_email_auth(driver, profile, base_url):
    # login email auth user
    sign_in_email_auth(driver, profile)

    # assert url is research mode service's dashboard
    assert driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id)
<commit_msg>Update tests for new dashboard URL

Includes both so we can migrate from one to the other. Currently blocking admin deploy.<commit_after>
from tests.test_utils import recordtime
from tests.pages.rollups import sign_in_email_auth


@recordtime
def test_email_auth(driver, profile, base_url):
    # login email auth user
    sign_in_email_auth(driver, profile)

    # assert url is research mode service's dashboard
    assert (
        driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id)
    ) or (
        driver.current_url == base_url + '/services/{}'.format(profile.notify_research_service_id)
    )
from tests.test_utils import recordtime from tests.pages.rollups import sign_in_email_auth @recordtime def test_email_auth(driver, profile, base_url): # login email auth user sign_in_email_auth(driver, profile) # assert url is research mode service's dashboard assert driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id) Update tests for new dashboard URL Includes both so we can migrate from one to the other. Currently blocking admin deploy.from tests.test_utils import recordtime from tests.pages.rollups import sign_in_email_auth @recordtime def test_email_auth(driver, profile, base_url): # login email auth user sign_in_email_auth(driver, profile) # assert url is research mode service's dashboard assert ( driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id) ) or ( driver.current_url == base_url + '/services/{}'.format(profile.notify_research_service_id) )
<commit_before>from tests.test_utils import recordtime from tests.pages.rollups import sign_in_email_auth @recordtime def test_email_auth(driver, profile, base_url): # login email auth user sign_in_email_auth(driver, profile) # assert url is research mode service's dashboard assert driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id) <commit_msg>Update tests for new dashboard URL Includes both so we can migrate from one to the other. Currently blocking admin deploy.<commit_after>from tests.test_utils import recordtime from tests.pages.rollups import sign_in_email_auth @recordtime def test_email_auth(driver, profile, base_url): # login email auth user sign_in_email_auth(driver, profile) # assert url is research mode service's dashboard assert ( driver.current_url == base_url + '/services/{}/dashboard'.format(profile.notify_research_service_id) ) or ( driver.current_url == base_url + '/services/{}'.format(profile.notify_research_service_id) )
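A note on the record above: the updated assertion simply accepts either dashboard URL while the admin app migrates routes. A minimal sketch of an equivalent, slightly tidier check; the helper name is hypothetical and the set-based form is a suggestion, not what the repository uses:

def assert_on_dashboard(driver, base_url, service_id):
    # Either route is acceptable while the admin app migrates URLs.
    accepted = {
        base_url + '/services/{}/dashboard'.format(service_id),  # new route
        base_url + '/services/{}'.format(service_id),            # old route
    }
    assert driver.current_url in accepted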
ac7102a85a30754d31d941395613b63574bfe026
xunit-autolabeler-v2/ast_parser/python_bootstrap.py
xunit-autolabeler-v2/ast_parser/python_bootstrap.py
#!/usr/bin/env python3.8
# ^ Use python 3.8 since Pip isn't configured for newer versions (3.9+)
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os
import sys

from python import invoker

if len(sys.argv) != 2:
    raise ValueError('Please specify exactly one [root] directory.')

root_dir = sys.argv[1]
output_path = os.path.join(root_dir, 'polyglot_snippet_data.json')

json_array = invoker.get_json_for_dir(root_dir)
with open(output_path, 'w') as file:
    json.dump(json_array, file)

print(f'JSON written to: {output_path}')
print('Do not move this file!')
#!/usr/bin/env python3
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os
import sys

from python import invoker

if len(sys.argv) != 2:
    raise ValueError('Please specify exactly one [root] directory.')

root_dir = sys.argv[1]
output_path = os.path.join(root_dir, 'polyglot_snippet_data.json')

json_array = invoker.get_json_for_dir(root_dir)
with open(output_path, 'w') as file:
    json.dump(json_array, file)

print(f'JSON written to: {output_path}')
print('Do not move this file!')
Revert "Backdate python version to fix tests"
Revert "Backdate python version to fix tests" This reverts commit dee546098a383df0b4f38324ecac9482c74cb2ae.
Python
apache-2.0
GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground
8d167a4db654dab46a0afbdd620349db9c68dc82
lcp/settings/staging.py
lcp/settings/staging.py
import os

from lcp.settings.base import *  # noqa

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
import os

from lcp.settings.base import *  # noqa

# FIXME: The wildcard is only here while testing on Vagrant.
# Host header checking fails without it.
ALLOWED_HOSTS = ['*']

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
Set temporary ALLOWED_HOSTS for Vagrant testing.
Set temporary ALLOWED_HOSTS for Vagrant testing.
Python
bsd-2-clause
mblayman/lcp,mblayman/lcp,mblayman/lcp
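The wildcard in the new settings is explicitly marked temporary: Django's host-header check rejects requests whose Host header is not whitelisted, and '*' disables that protection entirely. A minimal sketch of the usual follow-up, assuming a hypothetical DJANGO_ALLOWED_HOSTS environment variable that is not part of this commit:

import os

# Comma-separated hostnames, e.g. "staging.example.com,10.0.0.5"; falls back
# to localhost so host checking stays enabled even when the variable is unset.
ALLOWED_HOSTS = os.environ.get('DJANGO_ALLOWED_HOSTS', 'localhost').split(',')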
de35aee09f153999e0ae59879b2c57069616509b
polyaxon/polyaxon/config_settings/versions.py
polyaxon/polyaxon/config_settings/versions.py
from polyaxon.utils import config

CLI_MIN_VERSION = config.get_string('POLYAXON_CLI_MIN_VERSION', is_optional=True, default='0.0.0')
CLI_LATEST_VERSION = config.get_string('POLYAXON_CLI_LATEST_VERSION', is_optional=True, default='0.0.0')
PLATFORM_MIN_VERSION = config.get_string('POLYAXON_PLATFORM_MIN_VERSION', is_optional=True, default='0.0.0')
PLATFORM_LATEST_VERSION = config.get_string('POLYAXON_PLATFORM_LATEST_VERSION', is_optional=True, default='0.0.0')
LIB_MIN_VERSION = config.get_string('POLYAXON_LIB_MIN_VERSION', is_optional=True, default='0.0.0')
LIB_LATEST_VERSION = config.get_string('POLYAXON_LIB_LATEST_VERSION', is_optional=True, default='0.0.0')
CHART_VERSION = config.get_string('POLYAXON_CHART_VERSION', is_optional=True, default='0.0.0')
CHART_IS_UPGRADE = config.get_string('POLYAXON_CHART_IS_UPGRADE')
from polyaxon.utils import config

CLI_MIN_VERSION = config.get_string('POLYAXON_CLI_MIN_VERSION', is_optional=True, default='0.0.0')
CLI_LATEST_VERSION = config.get_string('POLYAXON_CLI_LATEST_VERSION', is_optional=True, default='0.0.0')
PLATFORM_MIN_VERSION = config.get_string('POLYAXON_PLATFORM_MIN_VERSION', is_optional=True, default='0.0.0')
PLATFORM_LATEST_VERSION = config.get_string('POLYAXON_PLATFORM_LATEST_VERSION', is_optional=True, default='0.0.0')
LIB_MIN_VERSION = config.get_string('POLYAXON_LIB_MIN_VERSION', is_optional=True, default='0.0.0')
LIB_LATEST_VERSION = config.get_string('POLYAXON_LIB_LATEST_VERSION', is_optional=True, default='0.0.0')
CHART_VERSION = config.get_string('POLYAXON_CHART_VERSION', is_optional=True, default='0.0.0')
CHART_IS_UPGRADE = config.get_boolean('POLYAXON_CHART_IS_UPGRADE')
Fix type of the 'is chart upgrade' env variable
Fix type of the 'is chart upgrade' env variable
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
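The small change above matters because environment variables are always strings, and any non-empty string, even 'false', is truthy in Python. A standalone illustration in plain Python (to_bool is an illustrative stand-in, not Polyaxon's actual config code):

raw = 'false'      # what a string getter would hand back for the env var
print(bool(raw))   # True: the upgrade flag would wrongly look enabled

def to_bool(value):
    # Explicit parsing, roughly what a boolean getter must do.
    return value.strip().lower() in ('1', 'true', 'yes')

print(to_bool(raw))  # False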
4250ae648ab975076fa8d87c8b40c0eca990fff7
numba/intrinsic/__init__.py
numba/intrinsic/__init__.py
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import

import llvm.core

from .intrinsic import IntrinsicLibrary
from .numba_intrinsic import is_numba_intrinsic

__all__ = []
all = {}


def _import_all():
    global __all__
    mods = ['math_intrinsic', 'string_intrinsic']
    for k in mods:
        mod = __import__(__name__ + '.' + k, fromlist=['__all__'])
        __all__.extend(mod.__all__)
        for k in mod.__all__:
            all[k] = globals()[k] = getattr(mod, k)

_import_all()


def default_intrinsic_library(context):
    '''Build an intrinsic library with a default set of external functions.

    context --- numba context

    TODO: It is possible to cache the default intrinsic library as a bitcode
    file on disk so that we don't build it every time.
    '''
    intrlib = IntrinsicLibrary(context)
    # install intrinsics
    for fncls in all.itervalues():
        intrlib.add(fncls)
    return intrlib
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import

import llvm.core

from .intrinsic import IntrinsicLibrary
from .numba_intrinsic import is_numba_intrinsic

__all__ = []
all = {}


def _import_all():
    global __all__
    mods = ['string_intrinsic']
    for k in mods:
        mod = __import__(__name__ + '.' + k, fromlist=['__all__'])
        __all__.extend(mod.__all__)
        for k in mod.__all__:
            all[k] = globals()[k] = getattr(mod, k)

_import_all()


def default_intrinsic_library(context):
    '''Build an intrinsic library with a default set of external functions.

    context --- numba context

    TODO: It is possible to cache the default intrinsic library as a bitcode
    file on disk so that we don't build it every time.
    '''
    intrlib = IntrinsicLibrary(context)
    # install intrinsics
    for fncls in all.itervalues():
        intrlib.add(fncls)
    return intrlib
Fix old import of math intrinsics
Fix old import of math intrinsics
Python
bsd-2-clause
gmarkall/numba,sklam/numba,numba/numba,GaZ3ll3/numba,pombredanne/numba,pombredanne/numba,pombredanne/numba,gdementen/numba,stefanseefeld/numba,cpcloud/numba,numba/numba,shiquanwang/numba,IntelLabs/numba,stonebig/numba,sklam/numba,pitrou/numba,seibert/numba,IntelLabs/numba,numba/numba,gmarkall/numba,pombredanne/numba,cpcloud/numba,gdementen/numba,stonebig/numba,shiquanwang/numba,GaZ3ll3/numba,seibert/numba,gdementen/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,jriehl/numba,seibert/numba,GaZ3ll3/numba,gmarkall/numba,gdementen/numba,gmarkall/numba,pitrou/numba,stonebig/numba,sklam/numba,cpcloud/numba,seibert/numba,seibert/numba,sklam/numba,ssarangi/numba,gmarkall/numba,jriehl/numba,numba/numba,pombredanne/numba,IntelLabs/numba,sklam/numba,stefanseefeld/numba,jriehl/numba,ssarangi/numba,ssarangi/numba,cpcloud/numba,jriehl/numba,stefanseefeld/numba,stefanseefeld/numba,stonebig/numba,IntelLabs/numba,stonebig/numba,stuartarchibald/numba,GaZ3ll3/numba,stuartarchibald/numba,ssarangi/numba,pitrou/numba,pitrou/numba,pitrou/numba,gdementen/numba,ssarangi/numba,cpcloud/numba,jriehl/numba,stuartarchibald/numba,stuartarchibald/numba,stefanseefeld/numba,shiquanwang/numba,GaZ3ll3/numba
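The commit above only trims a stale module name, but the import machinery it relies on deserves a gloss: __import__(name, fromlist=['__all__']) returns the leaf submodule rather than the top-level package, and the loop then re-exports everything each submodule lists in __all__. (Note in passing that all.itervalues() pins this historical file to Python 2.) A self-contained sketch of the same pattern using importlib, the modern spelling; package and module names here are placeholders, not numba's:

import importlib

def import_submodules(package, names):
    # Collect everything each submodule exports via __all__.
    exported = {}
    for name in names:
        mod = importlib.import_module(package + '.' + name)
        for attr in getattr(mod, '__all__', []):
            exported[attr] = getattr(mod, attr)
    return exported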
f6ed06bf16329f075b52b89f2fdfb061bb1355c1
mitmproxy/builtins/replace.py
mitmproxy/builtins/replace.py
import re

from mitmproxy import exceptions
from mitmproxy import filt


class Replace:
    def __init__(self):
        self.lst = []

    def configure(self, options, updated):
        """
            .replacements is a list of tuples (fpat, rex, s):

            fpatt: a string specifying a filter pattern.
            rex: a regular expression, as bytes.
            s: the replacement string, as bytes
        """
        lst = []
        for fpatt, rex, s in options.replacements:
            cpatt = filt.parse(fpatt)
            if not cpatt:
                raise exceptions.OptionsError(
                    "Invalid filter pattern: %s" % fpatt
                )
            try:
                re.compile(rex)
            except re.error as e:
                raise exceptions.OptionsError(
                    "Invalid regular expression: %s - %s" % (rex, str(e))
                )
            lst.append((rex, s, cpatt))
        self.lst = lst

    def execute(self, f):
        for rex, s, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    f.response.replace(rex, s, re.DOTALL)
                else:
                    f.request.replace(rex, s, re.DOTALL)

    def request(self, flow):
        if not flow.reply.has_message:
            self.execute(flow)

    def response(self, flow):
        if not flow.reply.has_message:
            self.execute(flow)
import re

from mitmproxy import exceptions
from mitmproxy import filt


class Replace:
    def __init__(self):
        self.lst = []

    def configure(self, options, updated):
        """
            .replacements is a list of tuples (fpat, rex, s):

            fpatt: a string specifying a filter pattern.
            rex: a regular expression, as bytes.
            s: the replacement string, as bytes
        """
        lst = []
        for fpatt, rex, s in options.replacements:
            cpatt = filt.parse(fpatt)
            if not cpatt:
                raise exceptions.OptionsError(
                    "Invalid filter pattern: %s" % fpatt
                )
            try:
                re.compile(rex)
            except re.error as e:
                raise exceptions.OptionsError(
                    "Invalid regular expression: %s - %s" % (rex, str(e))
                )
            lst.append((rex, s, cpatt))
        self.lst = lst

    def execute(self, f):
        for rex, s, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    f.response.replace(rex, s, flags=re.DOTALL)
                else:
                    f.request.replace(rex, s, flags=re.DOTALL)

    def request(self, flow):
        if not flow.reply.has_message:
            self.execute(flow)

    def response(self, flow):
        if not flow.reply.has_message:
            self.execute(flow)
Convert to flags=value for future compatibility
Convert to flags=value for future compatibility
Python
mit
mhils/mitmproxy,zlorb/mitmproxy,vhaupert/mitmproxy,laurmurclar/mitmproxy,mosajjal/mitmproxy,MatthewShao/mitmproxy,StevenVanAcker/mitmproxy,cortesi/mitmproxy,Kriechi/mitmproxy,Kriechi/mitmproxy,mhils/mitmproxy,MatthewShao/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,mitmproxy/mitmproxy,laurmurclar/mitmproxy,StevenVanAcker/mitmproxy,Kriechi/mitmproxy,xaxa89/mitmproxy,dwfreed/mitmproxy,dwfreed/mitmproxy,zlorb/mitmproxy,mhils/mitmproxy,laurmurclar/mitmproxy,ddworken/mitmproxy,dwfreed/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,ddworken/mitmproxy,xaxa89/mitmproxy,vhaupert/mitmproxy,ujjwal96/mitmproxy,cortesi/mitmproxy,xaxa89/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,ddworken/mitmproxy,xaxa89/mitmproxy,cortesi/mitmproxy,mhils/mitmproxy,ddworken/mitmproxy,dwfreed/mitmproxy,mosajjal/mitmproxy,Kriechi/mitmproxy,vhaupert/mitmproxy,cortesi/mitmproxy,mosajjal/mitmproxy,mitmproxy/mitmproxy,vhaupert/mitmproxy,MatthewShao/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,mosajjal/mitmproxy,mhils/mitmproxy,MatthewShao/mitmproxy,mitmproxy/mitmproxy,laurmurclar/mitmproxy,ujjwal96/mitmproxy
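The change guards against a classic re pitfall: in functions shaped like re.sub(pattern, repl, string, count=0, flags=0), a flag passed positionally is silently consumed as count. A standalone re.sub demonstration; whether mitmproxy's message.replace shares this exact signature is not shown in the record, so treat the analogy as illustrative:

import re

text = 'a\nb'
# re.DOTALL is just the int 16, so positionally it becomes count=16 and the
# flag is never applied: '.' still refuses to match the newline.
print(re.sub('a.', 'X', text, re.DOTALL))        # 'a\nb' (no substitution)
print(re.sub('a.', 'X', text, flags=re.DOTALL))  # 'Xb'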
41fd6e8aae4044520a2e44d590c005dd71150c0c
web/attempts/migrations/0008_add_submission_date.py
web/attempts/migrations/0008_add_submission_date.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-05-09 09:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('attempts', '0007_auto_20161004_0927'),
    ]

    operations = [
        migrations.AddField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        migrations.RunSQL(
            'UPDATE attempts_historicalattempt SET submission_date = history_date'
        ),
        migrations.RunSQL(
            '''UPDATE attempts_attempt
               SET submission_date = subquery.submission_date
               FROM (
                   SELECT user_id, part_id, max(history_date) AS submission_date
                   FROM attempts_historicalattempt
                   GROUP BY user_id, part_id
               ) AS subquery
               WHERE attempts_attempt.user_id = subquery.user_id
                 AND attempts_attempt.part_id = subquery.part_id
            '''
        ),
        migrations.AlterField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(blank=True, editable=False),
        ),
    ]
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-05-09 09:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('attempts', '0007_auto_20161004_0927'),
    ]

    operations = [
        migrations.AddField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(null=True),
        ),
        migrations.RunSQL(
            'UPDATE attempts_historicalattempt SET submission_date = history_date'
        ),
        migrations.RunSQL(
            '''UPDATE attempts_attempt
               SET submission_date = (
                   SELECT max(history_date)
                   FROM attempts_historicalattempt
                   WHERE attempts_attempt.user_id = user_id
                     AND attempts_attempt.part_id = part_id
               )
            '''
        ),
        migrations.AlterField(
            model_name='attempt',
            name='submission_date',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name='historicalattempt',
            name='submission_date',
            field=models.DateTimeField(blank=True, editable=False),
        ),
    ]
Revert "Revert "Make migration SQLite compatible""
Revert "Revert "Make migration SQLite compatible"" This reverts commit b16016994f20945a8a2bbb63b9cb920d856ab66f.
Python
agpl-3.0
ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = subquery.submission_date FROM ( SELECT user_id, part_id, max(history_date) AS submission_date FROM attempts_historicalattempt GROUP BY user_id, part_id ) AS subquery WHERE attempts_attempt.user_id = subquery.user_id AND attempts_attempt.part_id = subquery.part_id ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ] Revert "Revert "Make migration SQLite compatible"" This reverts commit b16016994f20945a8a2bbb63b9cb920d856ab66f.
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = ( SELECT max(history_date) FROM attempts_historicalattempt WHERE attempts_attempt.user_id = user_id AND attempts_attempt.part_id = part_id ) ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = subquery.submission_date FROM ( SELECT user_id, part_id, max(history_date) AS submission_date FROM attempts_historicalattempt GROUP BY user_id, part_id ) AS subquery WHERE attempts_attempt.user_id = subquery.user_id AND attempts_attempt.part_id = subquery.part_id ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ] <commit_msg>Revert "Revert "Make migration SQLite compatible"" This reverts commit b16016994f20945a8a2bbb63b9cb920d856ab66f.<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = ( SELECT max(history_date) FROM attempts_historicalattempt WHERE attempts_attempt.user_id = user_id AND attempts_attempt.part_id = part_id ) ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = subquery.submission_date FROM ( SELECT user_id, part_id, max(history_date) AS submission_date FROM attempts_historicalattempt GROUP BY user_id, part_id ) AS subquery WHERE attempts_attempt.user_id = subquery.user_id AND attempts_attempt.part_id = subquery.part_id ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ] Revert "Revert "Make migration SQLite compatible"" This reverts commit b16016994f20945a8a2bbb63b9cb920d856ab66f.# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = ( SELECT max(history_date) FROM attempts_historicalattempt WHERE attempts_attempt.user_id = user_id AND attempts_attempt.part_id = part_id ) ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = subquery.submission_date FROM ( SELECT user_id, part_id, max(history_date) AS submission_date FROM attempts_historicalattempt GROUP BY user_id, part_id ) AS subquery WHERE attempts_attempt.user_id = subquery.user_id AND attempts_attempt.part_id = subquery.part_id ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ] <commit_msg>Revert "Revert "Make migration SQLite compatible"" This reverts commit b16016994f20945a8a2bbb63b9cb920d856ab66f.<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = ( SELECT max(history_date) FROM attempts_historicalattempt WHERE attempts_attempt.user_id = user_id AND attempts_attempt.part_id = part_id ) ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ]
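The pair of RunSQL statements in the record above is the portable version of a backfill: PostgreSQL's `UPDATE ... FROM` join syntax is rewritten as a correlated subquery, which SQLite also accepts. A minimal sketch of the same pattern, with hypothetical app, table, and column names:

from django.db import migrations


# Correlated subquery: resolves per row, runs on both SQLite and PostgreSQL.
BACKFILL_SQL = '''UPDATE app_item
    SET latest_seen = (
        SELECT max(seen_at)
        FROM app_event
        WHERE app_event.item_id = app_item.id
    )
'''


class Migration(migrations.Migration):

    dependencies = [
        ('app', '0001_initial'),
    ]

    operations = [
        migrations.RunSQL(BACKFILL_SQL),
    ]

The subquery is re-evaluated for each row, so it can be slower than the `UPDATE ... FROM` join on large tables; the record trades that for backend portability.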
6edb40320a5338cbdb70321170f21ff9beb9d39a
personal-site/blog/views.py
personal-site/blog/views.py
from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__in=[tag.name]).distinct() context['posts'] = Post.objects.grouped_by_date() return context
from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__exact=tag.name).distinct() context['posts'] = Post.objects.grouped_by_date() return context
Fix field lookup for tag
Fix field lookup for tag
Python
bsd-3-clause
brandonw/personal-site,brandonw/personal-site,brandonw/personal-site
from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__in=[tag.name]).distinct() context['posts'] = Post.objects.grouped_by_date() return context Fix field lookup for tag
from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__exact=tag.name).distinct() context['posts'] = Post.objects.grouped_by_date() return context
<commit_before>from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__in=[tag.name]).distinct() context['posts'] = Post.objects.grouped_by_date() return context <commit_msg>Fix field lookup for tag<commit_after>
from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__exact=tag.name).distinct() context['posts'] = Post.objects.grouped_by_date() return context
from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__in=[tag.name]).distinct() context['posts'] = Post.objects.grouped_by_date() return context Fix field lookup for tagfrom django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__exact=tag.name).distinct() context['posts'] = Post.objects.grouped_by_date() return context
<commit_before>from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__in=[tag.name]).distinct() context['posts'] = Post.objects.grouped_by_date() return context <commit_msg>Fix field lookup for tag<commit_after>from django.shortcuts import render from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from taggit.models import Tag from blog.models import Post class BlogHomeView(TemplateView): template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogHomeView, self).get_context_data(**kwargs) context['post'] = Post.objects.order_by('-pub_date', '-pub_time').first() context['posts'] = Post.objects.grouped_by_date() return context class BlogPostView(DetailView): model = Post context_object_name = 'post' template_name = 'blog/post.html' def get_context_data(self, **kwargs): context = super(BlogPostView, self).get_context_data(**kwargs) context['posts'] = Post.objects.grouped_by_date() return context class BlogTagView(TemplateView): template_name = 'blog/tag.html' def get_context_data(self, **kwargs): context = super(BlogTagView, self).get_context_data(**kwargs) tagslug = kwargs['slug'] tag = Tag.objects.get(slug=tagslug) context['tag'] = tag.name context['taggedposts'] = Post.objects.filter(tags__name__exact=tag.name).distinct() context['posts'] = Post.objects.grouped_by_date() return context
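The one-line fix in the record above swaps a membership lookup for an exact match. Both go through the same reverse join on django-taggit's `tags` manager, which is why `distinct()` stays in place to collapse duplicate rows. A short sketch, assuming the record's `Post` model and a `tag` object as in the view:

# Several tags at once: the __in lookup with a list of names fits.
posts = Post.objects.filter(tags__name__in=['django', 'python']).distinct()

# Exactly one tag, as in the fixed view: __exact says what it does
# and avoids building a one-element list.
posts = Post.objects.filter(tags__name__exact=tag.name).distinct()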
a90041e444edd8a88bc264db5b1a9305ba94d88f
commands/laws.py
commands/laws.py
@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: f = open('files/laws.txt', 'r') i = 1 for line in f: say(channel, '{}. {}'.format(i, line)) i = i + 1 f.close() elif argv[0] == 'reset': f = open('files/laws.txt', 'r+') f.truncate() f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") say(channel, '{}: Laws updated.'.format(nick)) f.close() elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) say(channel, '{}: Laws updated.'.format(nick)) f.close()
@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: try: f = open('files/laws.txt', 'r') for i,line in enumerate(f): say(channel, '{}. {}'.format(i+1, line)) f.close() except IOError: say(channel,"Error: Coulh not open laws.txt!") elif argv[0] == 'reset': f = open('files/laws.txt', 'w') f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") f.close() say(channel, '{}: Laws updated.'.format(nick)) elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) f.close() say(channel, '{}: Laws updated.'.format(nick))
Handle file exceptions. Use enumerate. Err msg. Close files sooner.
Handle file exceptions. Use enumerate. Err msg. Close files sooner.
Python
unlicense
ccowmu/botler
@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: f = open('files/laws.txt', 'r') i = 1 for line in f: say(channel, '{}. {}'.format(i, line)) i = i + 1 f.close() elif argv[0] == 'reset': f = open('files/laws.txt', 'r+') f.truncate() f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") say(channel, '{}: Laws updated.'.format(nick)) f.close() elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) say(channel, '{}: Laws updated.'.format(nick)) f.close() Handle file exceptions. Use enumerate. Err msg. Close files sooner.
@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: try: f = open('files/laws.txt', 'r') for i,line in enumerate(f): say(channel, '{}. {}'.format(i+1, line)) f.close() except IOError: say(channel,"Error: Coulh not open laws.txt!") elif argv[0] == 'reset': f = open('files/laws.txt', 'w') f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") f.close() say(channel, '{}: Laws updated.'.format(nick)) elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) f.close() say(channel, '{}: Laws updated.'.format(nick))
<commit_before>@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: f = open('files/laws.txt', 'r') i = 1 for line in f: say(channel, '{}. {}'.format(i, line)) i = i + 1 f.close() elif argv[0] == 'reset': f = open('files/laws.txt', 'r+') f.truncate() f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") say(channel, '{}: Laws updated.'.format(nick)) f.close() elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) say(channel, '{}: Laws updated.'.format(nick)) f.close() <commit_msg>Handle file exceptions. Use enumerate. Err msg. Close files sooner.<commit_after>
@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: try: f = open('files/laws.txt', 'r') for i,line in enumerate(f): say(channel, '{}. {}'.format(i+1, line)) f.close() except IOError: say(channel,"Error: Coulh not open laws.txt!") elif argv[0] == 'reset': f = open('files/laws.txt', 'w') f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") f.close() say(channel, '{}: Laws updated.'.format(nick)) elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) f.close() say(channel, '{}: Laws updated.'.format(nick))
@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: f = open('files/laws.txt', 'r') i = 1 for line in f: say(channel, '{}. {}'.format(i, line)) i = i + 1 f.close() elif argv[0] == 'reset': f = open('files/laws.txt', 'r+') f.truncate() f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") say(channel, '{}: Laws updated.'.format(nick)) f.close() elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) say(channel, '{}: Laws updated.'.format(nick)) f.close() Handle file exceptions. Use enumerate. Err msg. Close files sooner.@command("laws") def echo(nick, user, channel, message): argv = message.split(maxsplit=1) if len(argv) == 0: try: f = open('files/laws.txt', 'r') for i,line in enumerate(f): say(channel, '{}. {}'.format(i+1, line)) f.close() except IOError: say(channel,"Error: Coulh not open laws.txt!") elif argv[0] == 'reset': f = open('files/laws.txt', 'w') f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n") f.close() say(channel, '{}: Laws updated.'.format(nick)) elif argv[0] == 'add' and len(argv) == 2: f = open('files/laws.txt', 'a') f.write("{}\n".format(argv[1])) f.close() say(channel, '{}: Laws updated.'.format(nick))
<commit_before>@command("laws")
def echo(nick, user, channel, message):
    argv = message.split(maxsplit=1)

    if len(argv) == 0:
        f = open('files/laws.txt', 'r')
        i = 1
        for line in f:
            say(channel, '{}. {}'.format(i, line))
            i = i + 1
        f.close()
    elif argv[0] == 'reset':
        f = open('files/laws.txt', 'r+')
        f.truncate()
        f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
        say(channel, '{}: Laws updated.'.format(nick))
        f.close()
    elif argv[0] == 'add' and len(argv) == 2:
        f = open('files/laws.txt', 'a')
        f.write("{}\n".format(argv[1]))
        say(channel, '{}: Laws updated.'.format(nick))
        f.close()

<commit_msg>Handle file exceptions. Use enumerate. Err msg. Close files sooner.<commit_after>@command("laws")
def echo(nick, user, channel, message):
    argv = message.split(maxsplit=1)

    if len(argv) == 0:
        try:
            f = open('files/laws.txt', 'r')
            for i, line in enumerate(f):
                say(channel, '{}. {}'.format(i+1, line))
            f.close()
        except IOError:
            say(channel, "Error: Could not open laws.txt!")
    elif argv[0] == 'reset':
        f = open('files/laws.txt', 'w')
        f.write("A robot may not injure a human being or, through inaction, allow a human being to come to harm.\nA robot must obey the orders given it by human beings, except where such orders would conflict with the First Law.\nA robot must protect its own existence as long as such protection does not conflict with the First or Second Law.\n")
        f.close()
        say(channel, '{}: Laws updated.'.format(nick))
    elif argv[0] == 'add' and len(argv) == 2:
        f = open('files/laws.txt', 'a')
        f.write("{}\n".format(argv[1]))
        f.close()
        say(channel, '{}: Laws updated.'.format(nick))
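The commit message's "Close files sooner" point generalizes: a `with` block closes the file even when `say()` raises mid-loop, which the manual `f.close()` calls above do not guarantee. A sketch of the read path using a context manager; `say` and the file path come from the record, the function name is hypothetical:

def read_laws(channel):
    try:
        # The file is closed when the block exits, exception or not.
        with open('files/laws.txt', 'r') as f:
            for i, line in enumerate(f, start=1):
                say(channel, '{}. {}'.format(i, line))
    except IOError:
        say(channel, "Error: Could not open laws.txt!")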
e3054d71d3988a5fbc79c0ece8e37e06ef9e6851
driveGraphs.py
driveGraphs.py
from EnsoMetricsGraph import EnsoMetricsTable #EnsoMetrics =[{'col1':'IPSL-CM5A-LR','col2':0.82,'col3':4.1}, # {'col1':'IPSL-CM5A-MR','col2':1.2,'col3':4.5}] EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics')
from EnsoMetricsGraph import EnsoMetricsTable EnsoMetrics =[['IPSL-CM5A-LR','0.82','4.1'], ['IPSL-CM5A-MR','1.2','4.5']] #EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics')
Create metrics table in EnsoMetricsGraph.py
Create metrics table in EnsoMetricsGraph.py
Python
bsd-3-clause
eguil/ENSO_metrics,eguil/ENSO_metrics
from EnsoMetricsGraph import EnsoMetricsTable #EnsoMetrics =[{'col1':'IPSL-CM5A-LR','col2':0.82,'col3':4.1}, # {'col1':'IPSL-CM5A-MR','col2':1.2,'col3':4.5}] EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics') Create metrics table in EnsoMetricsGraph.py
from EnsoMetricsGraph import EnsoMetricsTable EnsoMetrics =[['IPSL-CM5A-LR','0.82','4.1'], ['IPSL-CM5A-MR','1.2','4.5']] #EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics')
<commit_before> from EnsoMetricsGraph import EnsoMetricsTable #EnsoMetrics =[{'col1':'IPSL-CM5A-LR','col2':0.82,'col3':4.1}, # {'col1':'IPSL-CM5A-MR','col2':1.2,'col3':4.5}] EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics') <commit_msg>Create metrics table in EnsoMetricsGraph.py<commit_after>
from EnsoMetricsGraph import EnsoMetricsTable EnsoMetrics =[['IPSL-CM5A-LR','0.82','4.1'], ['IPSL-CM5A-MR','1.2','4.5']] #EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics')
from EnsoMetricsGraph import EnsoMetricsTable #EnsoMetrics =[{'col1':'IPSL-CM5A-LR','col2':0.82,'col3':4.1}, # {'col1':'IPSL-CM5A-MR','col2':1.2,'col3':4.5}] EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics') Create metrics table in EnsoMetricsGraph.py from EnsoMetricsGraph import EnsoMetricsTable EnsoMetrics =[['IPSL-CM5A-LR','0.82','4.1'], ['IPSL-CM5A-MR','1.2','4.5']] #EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics')
<commit_before> from EnsoMetricsGraph import EnsoMetricsTable #EnsoMetrics =[{'col1':'IPSL-CM5A-LR','col2':0.82,'col3':4.1}, # {'col1':'IPSL-CM5A-MR','col2':1.2,'col3':4.5}] EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics') <commit_msg>Create metrics table in EnsoMetricsGraph.py<commit_after> from EnsoMetricsGraph import EnsoMetricsTable EnsoMetrics =[['IPSL-CM5A-LR','0.82','4.1'], ['IPSL-CM5A-MR','1.2','4.5']] #EnsoMetrics =[[1,2,3],[4,5,6]] fig=EnsoMetricsTable(EnsoMetrics, 'EnsoMetrics')
fecb3e2b610609ff24b8b19483e0c4b19f23e6c9
ansi/doc/conf.py
ansi/doc/conf.py
# -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'http://docs.python.org/': None, 'http://sphinx.pocoo.org/': None,} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value')
# -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'python': ('http://docs.python.org/', None)} # broken in Sphinx 1.0 # 'sphinx': ('http://sphinx.pocoo.org/', None)} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value')
Update for Sphinx 1.0 intersphinx format and remove broken Sphinx inventory
Update for Sphinx 1.0 intersphinx format and remove broken Sphinx inventory
Python
bsd-2-clause
sphinx-contrib/spelling,sphinx-contrib/spelling
# -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'http://docs.python.org/': None, 'http://sphinx.pocoo.org/': None,} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value') Update for Sphinx 1.0 intersphinx format and remove broken Sphinx inventory
# -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'python': ('http://docs.python.org/', None)} # broken in Sphinx 1.0 # 'sphinx': ('http://sphinx.pocoo.org/', None)} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value')
<commit_before># -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'http://docs.python.org/': None, 'http://sphinx.pocoo.org/': None,} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value') <commit_msg>Update for Sphinx 1.0 intersphinx format and remove broken Sphinx inventory<commit_after>
# -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'python': ('http://docs.python.org/', None)} # broken in Sphinx 1.0 # 'sphinx': ('http://sphinx.pocoo.org/', None)} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value')
# -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'http://docs.python.org/': None, 'http://sphinx.pocoo.org/': None,} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value') Update for Sphinx 1.0 intersphinx format and remove broken Sphinx inventory# -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'python': ('http://docs.python.org/', None)} # broken in Sphinx 1.0 # 'sphinx': ('http://sphinx.pocoo.org/', None)} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value')
<commit_before># -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'http://docs.python.org/': None, 'http://sphinx.pocoo.org/': None,} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value') <commit_msg>Update for Sphinx 1.0 intersphinx format and remove broken Sphinx inventory<commit_after># -*- coding: utf-8 -*- import sys, os needs_sphinx = '1.0' extensions = ['sphinx.ext.intersphinx', 'sphinxcontrib.issuetracker'] source_suffix = '.rst' master_doc = 'index' project = u'sphinxcontrib-ansi' copyright = u'2010, Sebastian Wiesner' version = '0.5' release = '0.5' exclude_patterns = ['_build'] html_theme = 'default' html_static_path = [] intersphinx_mapping = {'python': ('http://docs.python.org/', None)} # broken in Sphinx 1.0 # 'sphinx': ('http://sphinx.pocoo.org/', None)} issuetracker = 'bitbucket' issuetracker_user = 'birkenfeld' issuetracker_project = 'sphinx-contrib' def setup(app): app.add_description_unit('confval', 'confval', 'pair: %s; configuration value')
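The mapping change in the record above is the Sphinx 1.0 format: keys become short names usable in explicit cross-references, and values become `(target, inventory)` pairs, where `None` means fetch `objects.inv` from the target URI. A minimal sketch; the URLs here are illustrative, not taken from the record:

intersphinx_mapping = {
    # name: (base URL of the target docs, inventory file or None)
    'python': ('https://docs.python.org/3', None),
    'sphinx': ('https://www.sphinx-doc.org/en/master', None),
}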
7e8d89a6e0f9ad406b5fffb16fe2850e0bf7b550
api/auth/urls.py
api/auth/urls.py
"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google/$', views.google, name='google') ]
"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google$', views.google, name='google') ]
Remove slash from Google Auth url
auth(google): Remove slash from Google Auth url
Python
agpl-3.0
tv-notify/tv-notify-server
"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google/$', views.google, name='google') ] auth(google): Remove slash from Google Auth url
"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google$', views.google, name='google') ]
<commit_before>"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google/$', views.google, name='google') ] <commit_msg>auth(google): Remove slash from Google Auth url<commit_after>
"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google$', views.google, name='google') ]
"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google/$', views.google, name='google') ] auth(google): Remove slash from Google Auth url"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google$', views.google, name='google') ]
<commit_before>"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google/$', views.google, name='google') ] <commit_msg>auth(google): Remove slash from Google Auth url<commit_after>"""Urls for authentication views""" from django.conf.urls import url from . import views urlpatterns = [ url(r'^google$', views.google, name='google') ]
acda2de1d6b317308a4a4f75d707774f06f16062
numba/control_flow/__init__.py
numba/control_flow/__init__.py
from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import *
from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * from numba.control_flow.delete_cfnode import DeleteStatement
Add DeleteStatement to control flow package
Add DeleteStatement to control flow package
Python
bsd-2-clause
cpcloud/numba,stonebig/numba,numba/numba,IntelLabs/numba,numba/numba,GaZ3ll3/numba,ssarangi/numba,stefanseefeld/numba,seibert/numba,stuartarchibald/numba,jriehl/numba,ssarangi/numba,stuartarchibald/numba,jriehl/numba,seibert/numba,stuartarchibald/numba,stefanseefeld/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,sklam/numba,shiquanwang/numba,shiquanwang/numba,gdementen/numba,GaZ3ll3/numba,stefanseefeld/numba,cpcloud/numba,GaZ3ll3/numba,numba/numba,jriehl/numba,gmarkall/numba,seibert/numba,IntelLabs/numba,numba/numba,jriehl/numba,gmarkall/numba,pombredanne/numba,pombredanne/numba,GaZ3ll3/numba,stefanseefeld/numba,sklam/numba,gmarkall/numba,pombredanne/numba,IntelLabs/numba,shiquanwang/numba,stonebig/numba,jriehl/numba,pitrou/numba,seibert/numba,pombredanne/numba,ssarangi/numba,sklam/numba,gmarkall/numba,cpcloud/numba,pitrou/numba,seibert/numba,GaZ3ll3/numba,stuartarchibald/numba,ssarangi/numba,gmarkall/numba,gdementen/numba,cpcloud/numba,sklam/numba,gdementen/numba,stonebig/numba,pombredanne/numba,gdementen/numba,ssarangi/numba,pitrou/numba,sklam/numba,stefanseefeld/numba,gdementen/numba,stonebig/numba,IntelLabs/numba,stonebig/numba,IntelLabs/numba,pitrou/numba,pitrou/numba
from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * Add DeleteStatement to control flow package
from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * from numba.control_flow.delete_cfnode import DeleteStatement
<commit_before>from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * <commit_msg>Add DeleteStatement to control flow package<commit_after>
from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * from numba.control_flow.delete_cfnode import DeleteStatement
from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * Add DeleteStatement to control flow packagefrom numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * from numba.control_flow.delete_cfnode import DeleteStatement
<commit_before>from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * <commit_msg>Add DeleteStatement to control flow package<commit_after>from numba.control_flow.control_flow import (ControlBlock, ControlFlowAnalysis, FuncDefExprNode) from numba.control_flow.cfstats import * from numba.control_flow.delete_cfnode import DeleteStatement
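The one-line addition in the record above is the standard re-export pattern: surfacing a submodule's class at the package root so downstream imports don't depend on the file layout. A generic sketch with hypothetical module and class names:

# mypackage/__init__.py
from mypackage._internal import PublicThing  # noqa: F401

# Callers can now write:
#     from mypackage import PublicThing
# and mypackage/_internal.py is free to move or be renamed later.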
a95fa658116ce4df9d05681bbf4ef75f6af682c9
oscarapi/serializers/login.py
oscarapi/serializers/login.py
from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( 'username', 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length('username'), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate(username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs
from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( User.USERNAME_FIELD, 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length(User.USERNAME_FIELD), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate( username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs
Fix LoginSerializer to support custom username fields of custom user models
Fix LoginSerializer to support custom username fields of custom user models
Python
bsd-3-clause
crgwbr/django-oscar-api,regulusweb/django-oscar-api
from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( 'username', 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length('username'), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate(username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs Fix LoginSerializer to support custom username fields of custom user models
from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( User.USERNAME_FIELD, 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length(User.USERNAME_FIELD), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate( username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs
<commit_before>from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( 'username', 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length('username'), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate(username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs <commit_msg>Fix LoginSerializer to support custom username fields of custom user models<commit_after>
from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( User.USERNAME_FIELD, 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length(User.USERNAME_FIELD), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate( username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs
from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( 'username', 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length('username'), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate(username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs Fix LoginSerializer to support custom username fields of custom user modelsfrom django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( User.USERNAME_FIELD, 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length(User.USERNAME_FIELD), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate( username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs
<commit_before>from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( 'username', 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length('username'), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate(username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs <commit_msg>Fix LoginSerializer to support custom username fields of custom user models<commit_after>from django.contrib.auth import get_user_model, authenticate from rest_framework import serializers from oscarapi.utils import overridable User = get_user_model() def field_length(fieldname): field = next( field for field in User._meta.fields if field.name == fieldname) return field.max_length class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = overridable('OSCARAPI_USER_FIELDS', ( User.USERNAME_FIELD, 'id', 'date_joined',)) class LoginSerializer(serializers.Serializer): username = serializers.CharField( max_length=field_length(User.USERNAME_FIELD), required=True) password = serializers.CharField( max_length=field_length('password'), required=True) def validate(self, attrs): user = authenticate( username=attrs['username'], password=attrs['password']) if user is None: raise serializers.ValidationError('invalid login') elif not user.is_active: raise serializers.ValidationError( 'Can not log in as inactive user') elif user.is_staff and overridable( 'OSCARAPI_BLOCK_ADMIN_API_ACCESS', True): raise serializers.ValidationError( 'Staff users can not log in via the rest api') # set instance to the user so we can use this in the view self.instance = user return attrs
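`User.USERNAME_FIELD` is how a custom user model (e.g. one keyed by email) names its identifying field, so introspecting it keeps the serializer working whatever the field is called. The record scans `_meta.fields` by hand; Django's public `_meta.get_field()` does the same lookup, sketched here:

from django.contrib.auth import get_user_model

User = get_user_model()


def field_length(fieldname):
    # Equivalent to the record's next()/generator scan, via the
    # formal Model._meta API.
    return User._meta.get_field(fieldname).max_length


username_max = field_length(User.USERNAME_FIELD)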
2515509c8e0d0461df043b26e74bcc5b574464a9
pybtex/bibtex/exceptions.py
pybtex/bibtex/exceptions.py
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA class BibTeXError(Exception): pass
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
Make BibTeXError a subclass of PybtexError.
Make BibTeXError a subclass of PybtexError.
Python
mit
live-clones/pybtex
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA class BibTeXError(Exception): pass Make BibTeXError a subclass of PybtexError.
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
<commit_before># Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA class BibTeXError(Exception): pass <commit_msg>Make BibTeXError a subclass of PybtexError.<commit_after>
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA class BibTeXError(Exception): pass Make BibTeXError a subclass of PybtexError.# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
<commit_before># Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA class BibTeXError(Exception): pass <commit_msg>Make BibTeXError a subclass of PybtexError.<commit_after># Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
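The commit above is a textbook package-level exception hierarchy: once BibTeXError derives from PybtexError, callers can trap every pybtex failure with a single handler. A minimal, self-contained sketch of the same pattern, using stand-in classes (the real base class lives in pybtex.exceptions, as the import in the commit shows):

# Stand-ins mirroring the hierarchy in the commit above.
class PybtexError(Exception):
    """Package-wide base class for all pybtex errors."""

class BibTeXError(PybtexError):
    """BibTeX-specific failures, now part of the package hierarchy."""

try:
    raise BibTeXError("unbalanced braces in entry")
except PybtexError as err:
    # One except clause now covers BibTeX errors and any other error
    # type the package hangs off its base class.
    print("pybtex problem:", err)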
fd32bdaa00c61d11edcf0ca60e4058e6d0b6b2d0
backend/pycon/settings/prod.py
backend/pycon/settings/prod.py
import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", None) AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None)
import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", "eu-central-1") AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None)
Add better default for s3 region
Add better default for s3 region
Python
mit
patrick91/pycon,patrick91/pycon
import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", None) AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None) Add better default for s3 region
import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", "eu-central-1") AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None)
<commit_before>import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", None) AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None) <commit_msg>Add better default for s3 region<commit_after>
import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", "eu-central-1") AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None)
import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", None) AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None) Add better default for s3 regionimport sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", "eu-central-1") AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None)
<commit_before>import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", None) AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None) <commit_msg>Add better default for s3 region<commit_after>import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration from .base import * # noqa from .base import env SECRET_KEY = env("SECRET_KEY") # CELERY_BROKER_URL = env("CELERY_BROKER_URL") USE_SCHEDULER = False # if FRONTEND_URL == "http://testfrontend.it/": # raise ImproperlyConfigured("Please configure FRONTEND_URL for production") SENTRY_DSN = env("SENTRY_DSN", default="") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration()]) SLACK_INCOMING_WEBHOOK_URL = env("SLACK_INCOMING_WEBHOOK_URL") DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", None) AWS_S3_REGION_NAME = env("AWS_REGION_NAME", "eu-central-1") AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", None) AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", None)
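One wrinkle worth flagging about the commit above: if env is django-environ's Env instance (an assumption; the import sits in the base settings, which are not shown), the second positional parameter of env(...) is a cast, not a default, so the unambiguous spelling passes defaults by keyword. A hedged sketch:

# Assumes django-environ. Defaults are safest passed by keyword, since
# Env.__call__'s second positional argument is a cast.
import environ

env = environ.Env()

AWS_S3_REGION_NAME = env("AWS_REGION_NAME", default="eu-central-1")
AWS_STORAGE_BUCKET_NAME = env("AWS_MEDIA_BUCKET", default=None)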
5da0e3a5d7389ab754ac20ce929a6ca28669c371
tests/test_induced_distributions.py
tests/test_induced_distributions.py
from unittest import TestCase from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) func = measure.generate_sample_measure("Chebyshev", 0, 0) assert callable(func)
from unittest import TestCase # from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling # measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) # func = measure.generate_sample_measure("Chebyshev", 0, 0) assert True
Remove previous induced sampling test as this is not relevant for now
Remove previous induced sampling test as this is not relevant for now The class in concern is currently a skeleton
Python
lgpl-2.1
psesh/Effective-Quadratures,Effective-Quadratures/Effective-Quadratures
from unittest import TestCase from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) func = measure.generate_sample_measure("Chebyshev", 0, 0) assert callable(func) Remove previous induced sampling test as this is not relevant for now The class in concern is currently a skeleton
from unittest import TestCase # from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling # measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) # func = measure.generate_sample_measure("Chebyshev", 0, 0) assert True
<commit_before>from unittest import TestCase from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) func = measure.generate_sample_measure("Chebyshev", 0, 0) assert callable(func) <commit_msg>Remove previous induced sampling test as this is not relevant for now The class in concern is currently a skeleton<commit_after>
from unittest import TestCase # from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling # measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) # func = measure.generate_sample_measure("Chebyshev", 0, 0) assert True
from unittest import TestCase from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) func = measure.generate_sample_measure("Chebyshev", 0, 0) assert callable(func) Remove previous induced sampling test as this is not relevant for now The class in concern is currently a skeletonfrom unittest import TestCase # from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling # measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) # func = measure.generate_sample_measure("Chebyshev", 0, 0) assert True
<commit_before>from unittest import TestCase from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) func = measure.generate_sample_measure("Chebyshev", 0, 0) assert callable(func) <commit_msg>Remove previous induced sampling test as this is not relevant for now The class in concern is currently a skeleton<commit_after>from unittest import TestCase # from equadratures.induced_distributions import InducedSampling class TestInducedDistribution(TestCase): def test_generate_sample_measure(self): # test if the method returns a function object for induced sampling # measure = InducedSampling(1, 1, 1, "Chebyshev", 0, 0) # func = measure.generate_sample_measure("Chebyshev", 0, 0) assert True
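Commenting the test body out and asserting True keeps the suite green but hides the intent. An alternative (not what this commit does) is unittest's skip decorator, which records the reason in the test report:

import unittest

class TestInducedDistribution(unittest.TestCase):
    @unittest.skip("InducedSampling is currently a skeleton class")
    def test_generate_sample_measure(self):
        # Body retained for when the class gains a real implementation.
        pass

if __name__ == "__main__":
    unittest.main()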
f17a70980f1964e40a22fad5e54f4cafcdcf9d52
useless_passport_validator/ulibrary.py
useless_passport_validator/ulibrary.py
#!/usr/bin/python3.4 from collections import namedtuple """Document constants""" countries = ["Mordor", "Gondor", "Lorien", "Shire"] genders = ["Male", "Female"] cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") UPass = namedtuple("UPass", "name gender purpose duration serial expires") UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") URecord = namedtuple("URecord", "purpose duration")
#!/usr/bin/python3.4 from collections import namedtuple def init(): """Document constants""" global countries countries = ["Mordor", "Gondor", "Lorien", "Shire"] global genders genders = ["Male", "Female"] global cities cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } global purpose purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" global UPassport UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") global UPass UPass = namedtuple("UPass", "name gender purpose duration serial expires") global UWorkVisa UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") global URecord URecord = namedtuple("URecord", "purpose duration")
Define init function. Make variables actually global
Define init function. Make variables actually global
Python
mit
Hethurin/UApp
#!/usr/bin/python3.4 from collections import namedtuple """Document constants""" countries = ["Mordor", "Gondor", "Lorien", "Shire"] genders = ["Male", "Female"] cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") UPass = namedtuple("UPass", "name gender purpose duration serial expires") UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") URecord = namedtuple("URecord", "purpose duration") Define init function. Make variables actually global
#!/usr/bin/python3.4 from collections import namedtuple def init(): """Document constants""" global countries countries = ["Mordor", "Gondor", "Lorien", "Shire"] global genders genders = ["Male", "Female"] global cities cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } global purpose purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" global UPassport UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") global UPass UPass = namedtuple("UPass", "name gender purpose duration serial expires") global UWorkVisa UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") global URecord URecord = namedtuple("URecord", "purpose duration")
<commit_before>#!/usr/bin/python3.4 from collections import namedtuple """Document constants""" countries = ["Mordor", "Gondor", "Lorien", "Shire"] genders = ["Male", "Female"] cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") UPass = namedtuple("UPass", "name gender purpose duration serial expires") UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") URecord = namedtuple("URecord", "purpose duration") <commit_msg>Define init function. Make variables actually global<commit_after>
#!/usr/bin/python3.4 from collections import namedtuple def init(): """Document constants""" global countries countries = ["Mordor", "Gondor", "Lorien", "Shire"] global genders genders = ["Male", "Female"] global cities cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } global purpose purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" global UPassport UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") global UPass UPass = namedtuple("UPass", "name gender purpose duration serial expires") global UWorkVisa UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") global URecord URecord = namedtuple("URecord", "purpose duration")
#!/usr/bin/python3.4 from collections import namedtuple """Document constants""" countries = ["Mordor", "Gondor", "Lorien", "Shire"] genders = ["Male", "Female"] cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") UPass = namedtuple("UPass", "name gender purpose duration serial expires") UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") URecord = namedtuple("URecord", "purpose duration") Define init function. Make variables actually global#!/usr/bin/python3.4 from collections import namedtuple def init(): """Document constants""" global countries countries = ["Mordor", "Gondor", "Lorien", "Shire"] global genders genders = ["Male", "Female"] global cities cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } global purpose purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" global UPassport UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") global UPass UPass = namedtuple("UPass", "name gender purpose duration serial expires") global UWorkVisa UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") global URecord URecord = namedtuple("URecord", "purpose duration")
<commit_before>#!/usr/bin/python3.4 from collections import namedtuple """Document constants""" countries = ["Mordor", "Gondor", "Lorien", "Shire"] genders = ["Male", "Female"] cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") UPass = namedtuple("UPass", "name gender purpose duration serial expires") UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") URecord = namedtuple("URecord", "purpose duration") <commit_msg>Define init function. Make variables actually global<commit_after>#!/usr/bin/python3.4 from collections import namedtuple def init(): """Document constants""" global countries countries = ["Mordor", "Gondor", "Lorien", "Shire"] global genders genders = ["Male", "Female"] global cities cities = { 'Mordor': 'Minas Morgul,Barad Dur', 'Gondor': 'Minas Tirith,Isengard,Osgiliath', 'Lorien': 'Lorien', 'Shire': 'Hobbiton,Waymeet,Frogmorton,Tuckborough' } global purpose purpose = ["Visit", "Transit", "Work", "Immigrate"] """Store user input here""" global UPassport UPassport = namedtuple("UPassport", "country name gender isscity expdate serial") global UPass UPass = namedtuple("UPass", "name gender purpose duration serial expires") global UWorkVisa UWorkVisa = namedtuple("UWorkVisa", "name proff duration expires") global URecord URecord = namedtuple("URecord", "purpose duration")
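The init() pattern in the commit above defers creation of module state until a caller asks for it; the trade-off is that every importer must run init() once before touching the names, or hit a NameError. A condensed, runnable version of the same mechanism:

from collections import namedtuple

def init():
    # Names bound through `global` become attributes of this module,
    # visible to importers only after init() has been called.
    global countries, UPassport
    countries = ["Mordor", "Gondor", "Lorien", "Shire"]
    UPassport = namedtuple(
        "UPassport", "country name gender isscity expdate serial")

init()  # without this call, the lookups below raise NameError
passport = UPassport("Gondor", "Frodo", "Male",
                     "Minas Tirith", "3019-03-25", "A123")
print(passport.country, passport.isscity)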
b6f0f3dfbf62b8b009155d3d510a908b73b53ab8
runtests.py
runtests.py
# -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', 'suit_dashboard', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:])
# -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:])
Remove suit_dashboard from INSTALLED_APPS causing py35 tests breaking as no models
Remove suit_dashboard from INSTALLED_APPS causing py35 tests breaking as no models
Python
isc
Pawamoy/django-suit-dashboard,Pawamoy/django-suit-dashboard,Pawamoy/django-suit-dashboard,Pawamoy/django-suit-dashboard
# -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', 'suit_dashboard', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:]) Remove suit_dashboard from INSTALLED_APPS causing py35 tests breaking as no models
# -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:])
<commit_before># -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', 'suit_dashboard', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:]) <commit_msg>Remove suit_dashboard from INSTALLED_APPS causing py35 tests breaking as no models<commit_after>
# -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:])
# -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', 'suit_dashboard', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:]) Remove suit_dashboard from INSTALLED_APPS causing py35 tests breaking as no models# -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:])
<commit_before># -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', 'suit_dashboard', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:]) <commit_msg>Remove suit_dashboard from INSTALLED_APPS causing py35 tests breaking as no models<commit_after># -*- coding: utf-8 -*- """ Entry point for Django tests. This script will setup the basic configuration needed by Django. """ import sys from os.path import abspath, dirname, join try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='suit_dashboard.urls', INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', ], SITE_ID=1, MIDDLEWARE_CLASSES=() ) try: import django sys.path.append(abspath(join(dirname(__file__), 'src'))) setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError('To fix this error, maybe run ' '`pip install -r requirements/test.txt`') def run_tests(*test_args): """Discover and run tests.""" if not test_args: test_args = ['tests'] # Run tests runner = get_runner(settings) test_runner = runner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:])
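The runner above works because settings.configure() plus django.setup() stand in for a settings module, and an app that ships no models, like the dashboard here, can still be imported and tested without appearing in INSTALLED_APPS. A trimmed sketch of that bootstrap (the app list and database below are illustrative, not the project's):

import django
from django.conf import settings
from django.test.utils import get_runner

if not settings.configured:
    settings.configure(
        DEBUG=True,
        DATABASES={"default": {"ENGINE": "django.db.backends.sqlite3"}},
        INSTALLED_APPS=[
            "django.contrib.contenttypes",
            "django.contrib.auth",
        ],
    )
    django.setup()

TestRunner = get_runner(settings)
failures = TestRunner().run_tests(["tests"])  # assumes a tests package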
daeeb010ce18fbcb0db62008285650916d2ed18f
action_plugins/insights.py
action_plugins/insights.py
from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) # copy our egg tmp = self._make_tmp_path() source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results
from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user # copy our egg tmp = self._make_tmp_path(remote_user) source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results
Update action plugin to fix _make_tmp_path issue
Update action plugin to fix _make_tmp_path issue _make_tmp_path expects 2 arguments. One of those is the remote_user. Add two lines to the action plugin to look at the ansible playbook or config to get that value.
Python
lgpl-2.1
kylape/ansible-insights-client
from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) # copy our egg tmp = self._make_tmp_path() source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results Update action plugin to fix _make_tmp_path issue _make_tmp_path expects 2 arguments. One of those is the remote_user. Add two lines to the action plugin to look at the ansible playbook or config to get that value.
from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user # copy our egg tmp = self._make_tmp_path(remote_user) source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results
<commit_before>from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) # copy our egg tmp = self._make_tmp_path() source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results <commit_msg>Update action plugin to fix _make_tmp_path issue _make_tmp_path expects 2 arguments. One of those is the remote_user. Add two lines to the action plugin to look at the ansible playbook or config to get that value.<commit_after>
from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user # copy our egg tmp = self._make_tmp_path(remote_user) source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results
from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) # copy our egg tmp = self._make_tmp_path() source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results Update action plugin to fix _make_tmp_path issue _make_tmp_path expects 2 arguments. One of those is the remote_user. Add two lines to the action plugin to look at the ansible playbook or config to get that value.from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user # copy our egg tmp = self._make_tmp_path(remote_user) source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results
<commit_before>from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) # copy our egg tmp = self._make_tmp_path() source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results <commit_msg>Update action plugin to fix _make_tmp_path issue _make_tmp_path expects 2 arguments. One of those is the remote_user. Add two lines to the action plugin to look at the ansible playbook or config to get that value.<commit_after>from ansible.plugins.action import ActionBase from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): results = super(ActionModule, self).run(tmp, task_vars) remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user # copy our egg tmp = self._make_tmp_path(remote_user) source_full = self._loader.get_real_file("falafel-1.35.0-py2.7.egg") tmp_src = self._connection._shell.join_path(tmp, 'insights') remote_path = self._transfer_file(source_full, tmp_src) results = merge_hash(results, self._execute_module(module_args={"egg_path": remote_path}, module_name="insights", tmp=tmp, task_vars=task_vars)) return results
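The fix above targets Ansible releases whose ActionBase._make_tmp_path requires a remote_user argument; other releases take no argument at all (exactly which release lines do which is an assumption here). A plugin that must straddle both can probe the signature at call time; make_tmp_path_compat is a hypothetical helper, not part of Ansible:

def make_tmp_path_compat(action, remote_user):
    """Call ActionBase._make_tmp_path across differing Ansible versions.

    action is an ActionBase instance; some versions want the remote
    user, older ones accept no arguments. A TypeError raised inside a
    genuinely failing call would also land here, so this is a sketch,
    not a hardened fix.
    """
    try:
        return action._make_tmp_path(remote_user)
    except TypeError:
        # Older signature: _make_tmp_path(self)
        return action._make_tmp_path()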
a882dc4df8c69880182f258e6c1d37646584fbb2
models/stock.py
models/stock.py
# -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants_ids = self.ids for quant2merge in self: if (quant2merge.id in pending_quants_ids and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost if quant.id in pending_quants_ids: pending_quants_ids.remove(quant.id) quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
# -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
Use browse record instead of ids
[MOD] Use browse record instead of ids
Python
agpl-3.0
acsone/stock-logistics-warehouse,kmee/stock-logistics-warehouse,avoinsystems/stock-logistics-warehouse,open-synergy/stock-logistics-warehouse,factorlibre/stock-logistics-warehouse
# -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants_ids = self.ids for quant2merge in self: if (quant2merge.id in pending_quants_ids and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost if quant.id in pending_quants_ids: pending_quants_ids.remove(quant.id) quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants() [MOD] Use browse record instead of ids
# -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
<commit_before># -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants_ids = self.ids for quant2merge in self: if (quant2merge.id in pending_quants_ids and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost if quant.id in pending_quants_ids: pending_quants_ids.remove(quant.id) quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants() <commit_msg>[MOD] Use browse record instead of ids<commit_after>
# -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
# -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants_ids = self.ids for quant2merge in self: if (quant2merge.id in pending_quants_ids and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost if quant.id in pending_quants_ids: pending_quants_ids.remove(quant.id) quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants() [MOD] Use browse record instead of ids# -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
<commit_before># -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants_ids = self.ids for quant2merge in self: if (quant2merge.id in pending_quants_ids and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost if quant.id in pending_quants_ids: pending_quants_ids.remove(quant.id) quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants() <commit_msg>[MOD] Use browse record instead of ids<commit_after># -*- encoding: utf-8 -*- ############################################################################## # For copyright and license notices, see __openerp__.py file in root directory ############################################################################## from openerp import models, api class StockQuant(models.Model): _inherit = 'stock.quant' @api.multi def merge_stock_quants(self): pending_quants = self.filtered(lambda x: True) for quant2merge in self: if (quant2merge in pending_quants and not quant2merge.reservation_id): quants = self.search( [('id', '!=', quant2merge.id), ('product_id', '=', quant2merge.product_id.id), ('lot_id', '=', quant2merge.lot_id.id), ('package_id', '=', quant2merge.package_id.id), ('location_id', '=', quant2merge.location_id.id), ('reservation_id', '=', False), ('propagated_from_id', '=', quant2merge.propagated_from_id.id)]) for quant in quants: if (self._get_latest_move(quant2merge) == self._get_latest_move(quant)): quant2merge.qty += quant.qty quant2merge.cost += quant.cost pending_quants -= quant quant.sudo().unlink() @api.model def quants_unreserve(self, move): quants = move.reserved_quant_ids super(StockQuant, self).quants_unreserve(move) quants.merge_stock_quants()
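The rewrite above leans on Odoo recordset arithmetic: "in" tests membership by record, "-=" drops records from the working set, and filtered(lambda x: True) is one way to take a copy of self before mutating it. A hedged sketch of those semantics as a model method (_remaining_after_merge is a hypothetical name, and the code only runs inside an Odoo registry):

def _remaining_after_merge(self, merged):
    """Return the quants in self that were not merged away.

    self and merged are stock.quant recordsets; -= rebinds to a new
    recordset without the given record, and the membership test
    compares records, not raw ids.
    """
    remaining = self.filtered(lambda quant: True)  # detached copy
    for quant in merged:
        if quant in remaining:
            remaining -= quant
    return remaining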
8c1cc6895f5f8772d2b09a9efab7395b0a6b39ba
wake/filters.py
wake/filters.py
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): return Markup(markdown.markdown(text))
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): md = markdown.Markdown(extensions=['meta']) return Markup(md.convert(text))
Update Markdown filter to recognize metadata.
Update Markdown filter to recognize metadata.
Python
bsd-3-clause
chromakode/wake
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): return Markup(markdown.markdown(text)) Update Markdown filter to recognize metadata.
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): md = markdown.Markdown(extensions=['meta']) return Markup(md.convert(text))
<commit_before>import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): return Markup(markdown.markdown(text)) <commit_msg>Update Markdown filter to recognize metadata.<commit_after>
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): md = markdown.Markdown(extensions=['meta']) return Markup(md.convert(text))
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): return Markup(markdown.markdown(text)) Update Markdown filter to recognize metadata.import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): md = markdown.Markdown(extensions=['meta']) return Markup(md.convert(text))
<commit_before>import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): return Markup(markdown.markdown(text)) <commit_msg>Update Markdown filter to recognize metadata.<commit_after>import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): md = markdown.Markdown(extensions=['meta']) return Markup(md.convert(text))
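For context on the 'meta' extension enabled in the change above: Python-Markdown parses leading 'Key: value' lines out of the source and exposes them on the converter after convert() runs. A minimal sketch, assuming the markdown package is installed:

import markdown

md = markdown.Markdown(extensions=['meta'])
html = md.convert('Title: Hello\nAuthor: someone\n\nBody text.')
print(md.Meta)  # {'title': ['Hello'], 'author': ['someone']}
print(html)     # <p>Body text.</p>  (the metadata lines are stripped from output)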
d60112e569e13333cfd6316d30683282ceff8bee
changes/jobs/cleanup_builds.py
changes/jobs/cleanup_builds.py
from datetime import datetime, timedelta from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: now, }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, })
from datetime import datetime, timedelta from sqlalchemy.sql import func from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: func.now(), }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, })
Use func.now for timestamp update
Use func.now for timestamp update
Python
apache-2.0
dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes
from datetime import datetime, timedelta from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: now, }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, }) Use func.now for timestamp update
from datetime import datetime, timedelta from sqlalchemy.sql import func from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: func.now(), }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, })
<commit_before>from datetime import datetime, timedelta from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: now, }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, }) <commit_msg>Use func.now for timestamp update<commit_after>
from datetime import datetime, timedelta from sqlalchemy.sql import func from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: func.now(), }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, })
from datetime import datetime, timedelta from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: now, }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, }) Use func.now for timestamp updatefrom datetime import datetime, timedelta from sqlalchemy.sql import func from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: func.now(), }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, })
<commit_before>from datetime import datetime, timedelta from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: now, }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, }) <commit_msg>Use func.now for timestamp update<commit_after>from datetime import datetime, timedelta from sqlalchemy.sql import func from changes.config import db, queue from changes.constants import Status from changes.models.build import Build def cleanup_builds(): """ Look for any jobs which haven't checked in (but are listed in a pending state) and mark them as finished in an unknown state. """ now = datetime.utcnow() cutoff = now - timedelta(minutes=5) build_list = Build.query.filter( Build.status != Status.finished, Build.date_modified < cutoff, ) db.session.query(Build).filter( Build.id.in_(b.id for b in build_list), ).update({ Build.date_modified: func.now(), }) for build in build_list: queue.delay('sync_build', kwargs={ 'build_id': build.id.hex, })
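The substance of the change above is evaluation time: datetime.utcnow() produces a plain Python value frozen at call time, while func.now() is a SQL expression the database evaluates when the UPDATE executes. A short sketch, assuming SQLAlchemy is installed:

from datetime import datetime
from sqlalchemy.sql import func

frozen = datetime.utcnow()  # fixed the moment this line runs, in Python
live = func.now()           # an expression object, not a value
print(live)                 # renders as "now()" inside the emitted SQL

With the bound value, every matched row would receive the timestamp computed before the query ran; with func.now(), each row gets the database clock at execution time.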
a98fc5ee439b651f669dac527fc95636f8e2d9bf
django/applications/catmaid/management/commands/catmaid_set_user_profiles_to_default.py
django/applications/catmaid/management/commands/catmaid_set_user_profiles_to_default.py
from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL # Save the changes up.save()
from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.independent_ontology_workspace_is_default = \ settings.PROFILE_INDEPENDENT_ONTOLOGY_WORKSPACE_IS_DEFAULT up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL up.show_ontology_tool = settings.PROFILE_SHOW_ONTOLOGY_TOOL # Save the changes up.save()
Bring user profile defaults management command up to date
Bring user profile defaults management command up to date
Python
agpl-3.0
htem/CATMAID,fzadow/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,fzadow/CATMAID,htem/CATMAID
from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL # Save the changes up.save() Bring user profile defaults management command up to date
from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.independent_ontology_workspace_is_default = \ settings.PROFILE_INDEPENDENT_ONTOLOGY_WORKSPACE_IS_DEFAULT up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL up.show_ontology_tool = settings.PROFILE_SHOW_ONTOLOGY_TOOL # Save the changes up.save()
<commit_before>from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL # Save the changes up.save() <commit_msg>Bring user profile defaults management command up to date<commit_after>
from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.independent_ontology_workspace_is_default = \ settings.PROFILE_INDEPENDENT_ONTOLOGY_WORKSPACE_IS_DEFAULT up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL up.show_ontology_tool = settings.PROFILE_SHOW_ONTOLOGY_TOOL # Save the changes up.save()
from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL # Save the changes up.save() Bring user profile defaults management command up to datefrom django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.independent_ontology_workspace_is_default = \ settings.PROFILE_INDEPENDENT_ONTOLOGY_WORKSPACE_IS_DEFAULT up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL up.show_ontology_tool = settings.PROFILE_SHOW_ONTOLOGY_TOOL # Save the changes up.save()
<commit_before>from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL # Save the changes up.save() <commit_msg>Bring user profile defaults management command up to date<commit_after>from django.conf import settings from django.contrib.auth.models import User from django.core.management.base import NoArgsCommand, CommandError from optparse import make_option class Command(NoArgsCommand): help = "Set the user profile settings of every user to the defaults" option_list = NoArgsCommand.option_list + ( make_option('--update-anon-user', dest='update-anon-user', default=False, action='store_true', help='Update also the profile of the anonymous user'), ) def handle_noargs(self, **options): update_anon_user = 'update-anon-user' in options for u in User.objects.all(): # Ignore the anonymous user by default if u.id == settings.ANONYMOUS_USER_ID and not update_anon_user: continue up = u.userprofile # Expect user profiles to be there and add all default settings up.inverse_mouse_wheel = settings.PROFILE_DEFAULT_INVERSE_MOUSE_WHEEL up.independent_ontology_workspace_is_default = \ settings.PROFILE_INDEPENDENT_ONTOLOGY_WORKSPACE_IS_DEFAULT up.show_text_label_tool = settings.PROFILE_SHOW_TEXT_LABEL_TOOL up.show_tagging_tool = settings.PROFILE_SHOW_TAGGING_TOOL up.show_cropping_tool = settings.PROFILE_SHOW_CROPPING_TOOL up.show_segmentation_tool = settings.PROFILE_SHOW_SEGMENTATION_TOOL up.show_tracing_tool = settings.PROFILE_SHOW_TRACING_TOOL up.show_ontology_tool = settings.PROFILE_SHOW_ONTOLOGY_TOOL # Save the changes up.save()
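One behaviour worth illustrating from the command above (present in both versions): a management command's options dict carries a value for every declared option, so the membership test 'update-anon-user' in options is true whether or not the flag was passed, and only a value lookup distinguishes it. A plain-dict sketch of the difference:

options = {'update-anon-user': False}          # the parser fills in the default
print('update-anon-user' in options)           # True even without the flag
print(bool(options.get('update-anon-user')))   # False, the value-based test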
978fe280a610c6cf9fb83b4726c7c1f536b92720
project/urls.py
project/urls.py
# Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
# Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse, HttpResponseRedirect schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^$', lambda r: HttpResponseRedirect('admin/')), url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
Add direct forward to admin
Add direct forward to admin
Python
bsd-2-clause
dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api,dbinetti/barberscore,dbinetti/barberscore,barberscore/barberscore-api
# Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ] Add direct forward to admin
# Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse, HttpResponseRedirect schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^$', lambda r: HttpResponseRedirect('admin/')), url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
<commit_before># Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ] <commit_msg>Add direct forward to admin<commit_after>
# Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse, HttpResponseRedirect schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^$', lambda r: HttpResponseRedirect('admin/')), url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
# Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ] Add direct forward to admin# Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse, HttpResponseRedirect schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^$', lambda r: HttpResponseRedirect('admin/')), url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
<commit_before># Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ] <commit_msg>Add direct forward to admin<commit_after># Django # Third-Party from rest_framework.documentation import include_docs_urls from rest_framework.schemas import get_schema_view # from django.views.generic import TemplateView # from api.views import variance, ann from django.conf import settings from django.conf.urls import ( include, url, ) from django.contrib import admin from django.http import HttpResponse, HttpResponseRedirect schema_view = get_schema_view( title='Barberscore API', ) urlpatterns = [ url(r'^$', lambda r: HttpResponseRedirect('admin/')), url(r'^admin/', admin.site.urls), # url(r'^variance/$', variance), # url(r'^ann/$', ann), url(r'^api/', include('api.urls')), url(r'^api-auth/', include('rest_framework.urls')), url(r'^schema/', schema_view), url(r'^docs/', include_docs_urls(title='Foobar', description='foo to the bar')), url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
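The lambda-plus-HttpResponseRedirect pattern added above has a class-based equivalent in Django, sketched here as an alternative (a URLconf fragment, so it needs a configured project to actually run):

from django.conf.urls import url
from django.views.generic import RedirectView

urlpatterns = [
    url(r'^$', RedirectView.as_view(url='admin/', permanent=False)),
]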
ea93225dd2da27a18f61de0a92f371766d5317ec
scanpointgenerator/point.py
scanpointgenerator/point.py
from collections import OrderedDict class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = OrderedDict() self.lower = OrderedDict() self.upper = OrderedDict() self.indexes = []
class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = {} self.lower = {} self.upper = {} self.indexes = []
Update Point to use normal dictionaries for its coordinates
Update Point to use normal dictionaries for its coordinates
Python
apache-2.0
dls-controls/scanpointgenerator
from collections import OrderedDict class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = OrderedDict() self.lower = OrderedDict() self.upper = OrderedDict() self.indexes = [] Update Point to use normal dictionaries for its coordinates
class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = {} self.lower = {} self.upper = {} self.indexes = []
<commit_before>from collections import OrderedDict class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = OrderedDict() self.lower = OrderedDict() self.upper = OrderedDict() self.indexes = [] <commit_msg>Update Point to use normal dictionaries for its coordinates<commit_after>
class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = {} self.lower = {} self.upper = {} self.indexes = []
from collections import OrderedDict class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = OrderedDict() self.lower = OrderedDict() self.upper = OrderedDict() self.indexes = [] Update Point to use normal dictionaries for its coordinates class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = {} self.lower = {} self.upper = {} self.indexes = []
<commit_before>from collections import OrderedDict class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = OrderedDict() self.lower = OrderedDict() self.upper = OrderedDict() self.indexes = [] <commit_msg>Update Point to use normal dictionaries for its coordinates<commit_after> class Point(object): """Contains information about for each scan point Attributes: positions (dict): Dict of str position_name -> float position for each scannable dimension. E.g. {"x": 0.1, "y": 2.2} lower (dict): Dict of str position_name -> float lower_bound for each scannable dimension. E.g. {"x": 0.95, "y": 2.15} upper (dict): Dict of str position_name -> float upper_bound for each scannable dimension. E.g. {"x": 1.05, "y": 2.25} indexes (list): List of int indexes for each dataset dimension, fastest changing last. E.g. [15] """ def __init__(self): self.positions = {} self.lower = {} self.upper = {} self.indexes = []
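The commit above does not state its rationale for dropping OrderedDict; one relevant fact is that plain dicts preserve insertion order in CPython 3.6 (guaranteed by the language from 3.7), so code that iterates positions in insertion order keeps working. A two-line check:

p = {}
p['x'] = 0.1
p['y'] = 2.2
print(list(p))  # ['x', 'y'], insertion order preserved on Python 3.7+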
eb1d581a94f87feb2bc09dbf45b13de282a205e8
pyqode/json/modes/autocomplete.py
pyqode/json/modes/autocomplete.py
from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event)
from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() try: self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") except KeyError: pass def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event)
Fix issue with auto complete when more than 1 editor has been created
Fix issue with auto complete when more than 1 editor has been created
Python
mit
pyQode/pyqode.json,pyQode/pyqode.json
from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event) Fix issue with auto complete when more than 1 editor has been created
from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() try: self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") except KeyError: pass def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event)
<commit_before>from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event) <commit_msg>Fix issue with auto complete when more than 1 editor has been created<commit_after>
from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() try: self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") except KeyError: pass def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event)
from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event) Fix issue with auto complete when more than 1 editor has been createdfrom pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() try: self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") except KeyError: pass def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event)
<commit_before>from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event) <commit_msg>Fix issue with auto complete when more than 1 editor has been created<commit_after>from pyqode.core import modes from pyqode.core.api import TextHelper class AutoCompleteMode(modes.AutoCompleteMode): def __init__(self): super(AutoCompleteMode, self).__init__() try: self.QUOTES_FORMATS.pop("'") self.SELECTED_QUOTES_FORMATS.pop("'") self.MAPPING.pop("'") except KeyError: pass def _on_key_pressed(self, event): helper = TextHelper(self.editor) indent = helper.line_indent() * ' ' if self.editor.textCursor().positionInBlock() == len(indent): self.QUOTES_FORMATS['"'] = '%s:' else: self.QUOTES_FORMATS['"'] = '%s' self.QUOTES_FORMATS['{'] = '\n' + indent + '%s' self.QUOTES_FORMATS['['] = '\n' + indent + '%s' super(AutoCompleteMode, self)._on_key_pressed(event)
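The try/except KeyError added above makes the pops safe when a second editor instance has already removed the keys; dict.pop with a default expresses the same idempotence without the exception handler (general dict behaviour, not pyQode-specific):

formats = {'"': '%s'}
formats.pop("'", None)  # no KeyError if the key is already gone
formats.pop("'", None)  # safe to repeat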
a60fa6989abd1080cafc860121885ec210d16771
script/update-frameworks.py
script/update-frameworks.py
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'https://github.com/atom/atom-shell-frameworks/releases/download/v0.0.1' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
Use atom/atom-shell-frameworks until atom/atom-shell is public
Use atom/atom-shell-frameworks until atom/atom-shell is public
Python
mit
Rokt33r/electron,shiftkey/electron,Jacobichou/electron,shaundunne/electron,systembugtj/electron,dkfiresky/electron,subblue/electron,wan-qy/electron,brave/muon,preco21/electron,farmisen/electron,icattlecoder/electron,sshiting/electron,dkfiresky/electron,kostia/electron,MaxWhere/electron,chrisswk/electron,Jacobichou/electron,shaundunne/electron,systembugtj/electron,dkfiresky/electron,subblue/electron,wan-qy/electron,brave/muon,preco21/electron,farmisen/electron,icattlecoder/electron,sshiting/electron,dkfiresky/electron,kostia/electron,MaxWhere/electron,chrisswk/electron,mrwizard82d1/electron,yan-foto/electron,posix4e/electron,Rokt33r/electron,jonatasfreitasv/electron,chriskdon/electron,hokein/atom-shell,thomsonreuters/electron,the-ress/electron,LadyNaggaga/electron,kenmozi/electron,brenca/electron,kenmozi/electron,aichingm/electron,ankitaggarwal011/electron,subblue/electron,natgolov/electron,edulan/electron,arturts/electron,jaanus/electron,tylergibson/electron,minggo/electron,robinvandernoord/electron,ianscrivener/electron,zhakui/electron,lrlna/electron,RIAEvangelist/electron,nekuz0r/electron,MaxGraey/electron,aaron-goshine/electron,jonatasfreitasv/electron,roadev/electron,d-salas/electron,jaanus/electron,bpasero/electron,deepak1556/atom-shell,kazupon/electron,arusakov/electron,JussMee15/electron,gbn972/electron,thingsinjars/electron,thompsonemerson/electron,xiruibing/electron,vHanda/electron,shiftkey/electron,Jacobichou/electron,shaundunne/electron,systembugtj/electron,dkfiresky/electron,subblue/electron,wan-qy/electron,brave/muon,preco21/electron,sshiting/electron,kcrt/electron,lrlna/electron,nagyistoce/electron-atom-shell,Zagorakiss/electron,Evercoder/electron,thomsonreuters/electron,bobwol/electron,tincan24/electron,IonicaBizauKitchen/electron,ianscrivener/electron,faizalpribadi/electron,brave/muon,twolfson/electron,chrisswk/electron,subblue/electron,bwiggs/electron,rhencke/electron,zhakui/electron,deepak1556/atom-shell,stevekinney/electron,sircharleswatson/electron,joneit/electron,bruce/electron,synaptek/electron,MaxGraey/electron,anko/electron,leethomas/electron,aecca/electron,mattdesl/electron,bright-sparks/electron,bbondy/electron,leftstick/electron,vHanda/electron,christian-bromann/electron,electron/electron,mjaniszew/electron,medixdev/electron,cqqccqc/electron,aliib/electron,jhen0409/electron,cqqccqc/electron,kenmozi/electron,greyhwndz/electron,MaxWhere/electron,pombredanne/electron,shiftkey/electron,cos2004/electron,twolfson/electron,leolujuyi/electron,thingsinjars/electron,voidbridge/electron,christian-bromann/electron,Andrey-Pavlov/electron,jacksondc/electron,darwin/electron,shockone/electron,renaesop/electron,rprichard/electron,rreimann/electron,rsvip/electron,jlhbaseball15/electron,yalexx/electron,jtburke/electron,rsvip/electron,tylergibson/electron,ianscrivener/electron,beni55/electron,fireball-x/atom-shell,DivyaKMenon/electron,synaptek/electron,kikong/electron,rajatsingla28/electron,mirrh/electron,kostia/electron,simongregory/electron,thingsinjars/electron,gerhardberger/electron,gstack/infinium-shell,RobertJGabriel/electron,jjz/electron,sky7sea/electron,biblerule/UMCTelnetHub,vHanda/electron,rsvip/electron,jlhbaseball15/electron,yalexx/electron,jannishuebl/electron,robinvandernoord/electron,jsutcodes/electron,tonyganch/electron,DivyaKMenon/electron,JesselJohn/electron,thomsonreuters/electron,bright-sparks/electron,wan-qy/electron,beni55/electron,digideskio/electron,renaesop/electron,jacksondc/electron,saronwei/electron,adcentury/electron,takashi/electron,Rokt33r/electron,Jacobichou/electron,jsutcodes/electron,nagyistoce/electron-atom-shell,setzer777/electron,egoist/electron,rreimann/electron,faizalpribadi/electron,carsonmcdonald/electron,nicobot/electron,eric-seekas/electron,miniak/electron,cqqccqc/electron,iftekeriba/electron,kikong/electron,ankitaggarwal011/electron,stevemao/electron,jacksondc/electron,gabrielPeart/electron,twolfson/electron,icattlecoder/electron,brenca/electron,deed02392/electron,eriser/electron,meowlab/electron,jtburke/electron,mirrh/electron,cqqccqc/electron,GoooIce/electron,nekuz0r/electron,mattotodd/electron,tomashanacek/electron,dongjoon-hyun/electron,michaelchiche/electron,BionicClick/electron,Faiz7412/electron,adamjgray/electron,deepak1556/atom-shell,medixdev/electron,bpasero/electron,rsvip/electron,leolujuyi/electron,jtburke/electron,kostia/electron,gstack/infinium-shell,shockone/electron,adcentury/electron,Andrey-Pavlov/electron,vaginessa/electron,evgenyzinoviev/electron,takashi/electron,cos2004/electron,pombredanne/electron,jsutcodes/electron,adcentury/electron,eric-seekas/electron,howmuchcomputer/electron,sky7sea/electron,fritx/electron,rprichard/electron,roadev/electron,systembugtj/electron,leethomas/electron,shennushi/electron,adamjgray/electron,fabien-d/electron,BionicClick/electron,adcentury/electron,arturts/electron,Gerhut/electron,stevekinney/electron,bbondy/electron,mrwizard82d1/electron,nagyistoce/electron-atom-shell,setzer777/electron,jlord/
electron,brave/muon,Ivshti/electron,farmisen/electron,baiwyc119/electron,ervinb/electron,subblue/electron,thomsonreuters/electron,thompsonemerson/electron,oiledCode/electron,chriskdon/electron,mattdesl/electron,simonfork/electron,kikong/electron,aliib/electron,meowlab/electron,minggo/electron,egoist/electron,leftstick/electron,rsvip/electron,DivyaKMenon/electron,mjaniszew/electron,Andrey-Pavlov/electron,bobwol/electron,fomojola/electron,tomashanacek/electron,lrlna/electron,adamjgray/electron,soulteary/electron,etiktin/electron,benweissmann/electron,John-Lin/electron,zhakui/electron,sircharleswatson/electron,vaginessa/electron,RIAEvangelist/electron,ianscrivener/electron,vaginessa/electron,posix4e/electron,tinydew4/electron,nicholasess/electron,setzer777/electron,gerhardberger/electron,wolfflow/electron,vaginessa/electron,sircharleswatson/electron,gabrielPeart/electron,kazupon/electron,gabrielPeart/electron,voidbridge/electron,simonfork/electron,jtburke/electron,edulan/electron,eriser/electron,shennushi/electron,JussMee15/electron,JussMee15/electron,abhishekgahlot/electron,jonatasfreitasv/electron,mubassirhayat/electron,deed02392/electron,gerhardberger/electron,IonicaBizauKitchen/electron,carsonmcdonald/electron,shockone/electron,etiktin/electron,astoilkov/electron,pirafrank/electron,gabriel/electron,JesselJohn/electron,bright-sparks/electron,SufianHassan/electron,vipulroxx/electron,aaron-goshine/electron,evgenyzinoviev/electron,trigrass2/electron,kenmozi/electron,davazp/electron,posix4e/electron,robinvandernoord/electron,GoooIce/electron,thompsonemerson/electron,mjaniszew/electron,kostia/electron,matiasinsaurralde/electron,jsutcodes/electron,etiktin/electron,natgolov/electron,nicobot/electron,aichingm/electron,jjz/electron,meowlab/electron,nicobot/electron,IonicaBizauKitchen/electron,dongjoon-hyun/electron,Ivshti/electron,John-Lin/electron,oiledCode/electron,joneit/electron,webmechanicx/electron,fabien-d/electron,jcblw/electron,trankmichael/electron,jonatasfreitasv/electron,sky7sea/electron,mhkeller/electron,benweissmann/electron,trankmichael/electron,jsutcodes/electron,xiruibing/electron,synaptek/electron,takashi/electron,kostia/electron,lrlna/electron,the-ress/electron,jlord/electron,evgenyzinoviev/electron,saronwei/electron,jonatasfreitasv/electron,simongregory/electron,John-Lin/electron,aecca/electron,arturts/electron,michaelchiche/electron,roadev/electron,the-ress/electron,greyhwndz/electron,tomashanacek/electron,neutrous/electron,lzpfmh/electron,JussMee15/electron,thomsonreuters/electron,jsutcodes/electron,jhen0409/electron,leftstick/electron,bitemyapp/electron,smczk/electron,icattlecoder/electron,kazupon/electron,fomojola/electron,deepak1556/atom-shell,aliib/electron,systembugtj/electron,arusakov/electron,timruffles/electron,Gerhut/electron,trigrass2/electron,gamedevsam/electron,Andrey-Pavlov/electron,electron/electron,oiledCode/electron,carsonmcdonald/electron,tylergibson/electron,leethomas/electron,howmuchcomputer/electron,farmisen/electron,ankitaggarwal011/electron,icattlecoder/electron,jlord/electron,thingsinjars/electron,maxogden/atom-shell,baiwyc119/electron,darwin/electron,xfstudio/electron,Jacobichou/electron,shennushi/electron,mubassirhayat/electron,dahal/electron,bright-sparks/electron,sircharleswatson/electron,d-salas/electron,medixdev/electron,lzpfmh/electron,sircharleswatson/electron,cos2004/electron,shennushi/electron,maxogden/atom-shell,rhencke/electron,smczk/electron,medixdev/electron,adcentury/electron,bbondy/electron,tincan24/electron,d-salas/electron,subblue/electr
on,eric-seekas/electron,Zagorakiss/electron,aichingm/electron,arusakov/electron,Jonekee/electron,darwin/electron,robinvandernoord/electron,jaanus/electron,chriskdon/electron,Jonekee/electron,systembugtj/electron,tomashanacek/electron,noikiy/electron,noikiy/electron,destan/electron,Ivshti/electron,bpasero/electron,etiktin/electron,destan/electron,chriskdon/electron,DivyaKMenon/electron,aaron-goshine/electron,gbn972/electron,fritx/electron,Zagorakiss/electron,leftstick/electron,pombredanne/electron,mhkeller/electron,shaundunne/electron,micalan/electron,sky7sea/electron,gamedevsam/electron,zhakui/electron,xfstudio/electron,chriskdon/electron,dahal/electron,gstack/infinium-shell,RIAEvangelist/electron,vipulroxx/electron,aaron-goshine/electron,icattlecoder/electron,dongjoon-hyun/electron,jcblw/electron,gbn972/electron,dongjoon-hyun/electron,vHanda/electron,neutrous/electron,gerhardberger/electron,gabrielPeart/electron,neutrous/electron,rajatsingla28/electron,seanchas116/electron,RobertJGabriel/electron,anko/electron,trigrass2/electron,shaundunne/electron,medixdev/electron,Neron-X5/electron,thompsonemerson/electron,anko/electron,christian-bromann/electron,christian-bromann/electron,jannishuebl/electron,soulteary/electron,jiaz/electron,the-ress/electron,voidbridge/electron,etiktin/electron,joaomoreno/atom-shell,jcblw/electron,bitemyapp/electron,nekuz0r/electron,roadev/electron,arturts/electron,rajatsingla28/electron,tonyganch/electron,stevekinney/electron,wan-qy/electron,mirrh/electron,neutrous/electron,simonfork/electron,matiasinsaurralde/electron,takashi/electron,brave/electron,preco21/electron,jlhbaseball15/electron,abhishekgahlot/electron,nicobot/electron,fomojola/electron,brenca/electron,egoist/electron,astoilkov/electron,kokdemo/electron,miniak/electron,rhencke/electron,ervinb/electron,sshiting/electron,bpasero/electron,evgenyzinoviev/electron,howmuchcomputer/electron,dahal/electron,jannishuebl/electron,eriser/electron,tonyganch/electron,anko/electron,tinydew4/electron,vaginessa/electron,tomashanacek/electron,tylergibson/electron,SufianHassan/electron,oiledCode/electron,ervinb/electron,brave/electron,matiasinsaurralde/electron,SufianHassan/electron,darwin/electron,fomojola/electron,jlord/electron,stevekinney/electron,posix4e/electron,kostia/electron,brave/electron,fabien-d/electron,shiftkey/electron,MaxWhere/electron,SufianHassan/electron,michaelchiche/electron,egoist/electron,pandoraui/electron,lzpfmh/electron,kokdemo/electron,synaptek/electron,rhencke/electron,SufianHassan/electron,digideskio/electron,adamjgray/electron,Neron-X5/electron,jaanus/electron,leolujuyi/electron,pombredanne/electron,natgolov/electron,carsonmcdonald/electron,trankmichael/electron,abhishekgahlot/electron,LadyNaggaga/electron,jjz/electron,eric-seekas/electron,jiaz/electron,astoilkov/electron,the-ress/electron,rreimann/electron,nicobot/electron,RobertJGabriel/electron,brave/electron,minggo/electron,jannishuebl/electron,minggo/electron,natgolov/electron,abhishekgahlot/electron,farmisen/electron,Gerhut/electron,vaginessa/electron,DivyaKMenon/electron,IonicaBizauKitchen/electron,ankitaggarwal011/electron,edulan/electron,DivyaKMenon/electron,miniak/electron,jcblw/electron,posix4e/electron,farmisen/electron,dahal/electron,renaesop/electron,tonyganch/electron,d-salas/electron,kcrt/electron,cos2004/electron,jtburke/electron,iftekeriba/electron,coderhaoxin/electron,bwiggs/electron,anko/electron,wan-qy/electron,cos2004/electron,bbondy/electron,bwiggs/electron,michaelchiche/electron,mattotodd/electron,kazupon/electron,bobwol/e
lectron,benweissmann/electron,mrwizard82d1/electron,oiledCode/electron,setzer777/electron,yalexx/electron,GoooIce/electron,pandoraui/electron,jlhbaseball15/electron,aichingm/electron,mjaniszew/electron,deed02392/electron,electron/electron,saronwei/electron,bobwol/electron,yan-foto/electron,christian-bromann/electron,fireball-x/atom-shell,twolfson/electron,abhishekgahlot/electron,soulteary/electron,pirafrank/electron,MaxGraey/electron,d-salas/electron,electron/electron,yan-foto/electron,yalexx/electron,stevekinney/electron,ankitaggarwal011/electron,yan-foto/electron,tincan24/electron,miniak/electron,destan/electron,arusakov/electron,davazp/electron,deepak1556/atom-shell,edulan/electron,shockone/electron,gerhardberger/electron,bruce/electron,yan-foto/electron,soulteary/electron,renaesop/electron,kcrt/electron,beni55/electron,digideskio/electron,fffej/electron,kokdemo/electron,bitemyapp/electron,gabriel/electron,jaanus/electron,Gerhut/electron,leolujuyi/electron,micalan/electron,bruce/electron,electron/electron,nagyistoce/electron-atom-shell,gabriel/electron,nekuz0r/electron,neutrous/electron,jcblw/electron,thompsonemerson/electron,jhen0409/electron,John-Lin/electron,kikong/electron,iftekeriba/electron,leolujuyi/electron,mattdesl/electron,saronwei/electron,jjz/electron,Rokt33r/electron,timruffles/electron,LadyNaggaga/electron,tinydew4/electron,jlhbaseball15/electron,roadev/electron,synaptek/electron,pombredanne/electron,soulteary/electron,robinvandernoord/electron,takashi/electron,dkfiresky/electron,ianscrivener/electron,kokdemo/electron,fffej/electron,farmisen/electron,JesselJohn/electron,gstack/infinium-shell,tincan24/electron,kokdemo/electron,aichingm/electron,fritx/electron,aliib/electron,RobertJGabriel/electron,eriser/electron,yalexx/electron,Floato/electron,Rokt33r/electron,joneit/electron,christian-bromann/electron,gabriel/electron,nekuz0r/electron,shennushi/electron,wolfflow/electron,tinydew4/electron,webmechanicx/electron,pandoraui/electron,mattotodd/electron,iftekeriba/electron,mirrh/electron,Floato/electron,xiruibing/electron,destan/electron,evgenyzinoviev/electron,felixrieseberg/electron,gamedevsam/electron,IonicaBizauKitchen/electron,zhakui/electron,trankmichael/electron,tylergibson/electron,timruffles/electron,nicholasess/electron,felixrieseberg/electron,mhkeller/electron,bbondy/electron,dahal/electron,arturts/electron,RobertJGabriel/electron,howmuchcomputer/electron,vHanda/electron,rajatsingla28/electron,evgenyzinoviev/electron,seanchas116/electron,tonyganch/electron,nicholasess/electron,jcblw/electron,Evercoder/electron,Gerhut/electron,mattotodd/electron,simonfork/electron,mhkeller/electron,bpasero/electron,Rokt33r/electron,baiwyc119/electron,kcrt/electron,jhen0409/electron,Neron-X5/electron,simongregory/electron,renaesop/electron,RIAEvangelist/electron,sshiting/electron,jiaz/electron,xfstudio/electron,neutrous/electron,aecca/electron,leethomas/electron,miniak/electron,faizalpribadi/electron,MaxGraey/electron,noikiy/electron,rhencke/electron,GoooIce/electron,chrisswk/electron,beni55/electron,jjz/electron,nekuz0r/electron,hokein/atom-shell,BionicClick/electron,lzpfmh/electron,brave/electron,gabriel/electron,Zagorakiss/electron,kcrt/electron,IonicaBizauKitchen/electron,preco21/electron,trigrass2/electron,greyhwndz/electron,Gerhut/electron,joneit/electron,Jonekee/electron,shiftkey/electron,tincan24/electron,aecca/electron,pandoraui/electron,nicholasess/electron,smczk/electron,darwin/electron,nicholasess/electron,faizalpribadi/electron,arturts/electron,stevekinney/electron,christia
n-bromann/electron,timruffles/electron,timruffles/electron,jlhbaseball15/electron,biblerule/UMCTelnetHub,leftstick/electron,simongregory/electron,voidbridge/electron,icattlecoder/electron,Floato/electron,Ivshti/electron,MaxWhere/electron,davazp/electron,mirrh/electron,lzpfmh/electron,vipulroxx/electron,faizalpribadi/electron,minggo/electron,lrlna/electron,simonfork/electron,Andrey-Pavlov/electron,fireball-x/atom-shell,digideskio/electron,chriskdon/electron,renaesop/electron,aichingm/electron,BionicClick/electron,adamjgray/electron,eric-seekas/electron,edulan/electron,deed02392/electron,micalan/electron,vHanda/electron,subblue/electron,greyhwndz/electron,jannishuebl/electron,brave/muon,brenca/electron,gbn972/electron,thingsinjars/electron,zhakui/electron,aecca/electron,mjaniszew/electron,leftstick/electron,simonfork/electron,kcrt/electron,Neron-X5/electron,pirafrank/electron,roadev/electron,jaanus/electron,dongjoon-hyun/electron,rhencke/electron,rprichard/electron,gamedevsam/electron,micalan/electron,kokdemo/electron,maxogden/atom-shell,twolfson/electron,vipulroxx/electron,thompsonemerson/electron,shockone/electron,bwiggs/electron,shennushi/electron,baiwyc119/electron,sky7sea/electron,kenmozi/electron,joaomoreno/atom-shell,maxogden/atom-shell,soulteary/electron,xiruibing/electron,yalexx/electron,jacksondc/electron,fritx/electron,bpasero/electron,davazp/electron,xiruibing/electron,adcentury/electron,rreimann/electron,mattotodd/electron,Jacobichou/electron,brave/muon,kikong/electron,micalan/electron,jhen0409/electron,webmechanicx/electron,seanchas116/electron,aecca/electron,cqqccqc/electron,hokein/atom-shell,RobertJGabriel/electron,anko/electron,greyhwndz/electron,miniak/electron,felixrieseberg/electron,jiaz/electron,aliib/electron,webmechanicx/electron,fabien-d/electron,coderhaoxin/electron,preco21/electron,aaron-goshine/electron,sky7sea/electron,simongregory/electron,davazp/electron,Andrey-Pavlov/electron,dkfiresky/electron,JussMee15/electron,setzer777/electron,setzer777/electron,oiledCode/electron,tinydew4/electron,aaron-goshine/electron,brave/electron,MaxWhere/electron,Faiz7412/electron,greyhwndz/electron,smczk/electron,the-ress/electron,carsonmcdonald/electron,digideskio/electron,stevemao/electron,bobwol/electron,BionicClick/electron,xiruibing/electron,GoooIce/electron,rajatsingla28/electron,Faiz7412/electron,xfstudio/electron,posix4e/electron,robinvandernoord/electron,LadyNaggaga/electron,Evercoder/electron,mubassirhayat/electron,joaomoreno/atom-shell,mirrh/electron,matiasinsaurralde/electron,eric-seekas/electron,rreimann/electron,cqqccqc/electron,jhen0409/electron,beni55/electron,wan-qy/electron,benweissmann/electron,pirafrank/electron,MaxWhere/electron,howmuchcomputer/electron,tonyganch/electron,mattdesl/electron,Neron-X5/electron,nicobot/electron,vipulroxx/electron,dahal/electron,sshiting/electron,natgolov/electron,Faiz7412/electron,pandoraui/electron,setzer777/electron,fomojola/electron,Faiz7412/electron,felixrieseberg/electron,bitemyapp/electron,brenca/electron,shaundunne/electron,sshiting/electron,davazp/electron,jannishuebl/electron,jlhbaseball15/electron,seanchas116/electron,mattdesl/electron,jiaz/electron,Evercoder/electron,jacksondc/electron,shaundunne/electron,BionicClick/electron,jtburke/electron,MaxGraey/electron,voidbridge/electron,abhishekgahlot/electron,ervinb/electron,baiwyc119/electron,carsonmcdonald/electron,jlord/electron,bobwol/electron,LadyNaggaga/electron,tincan24/electron,noikiy/electron,the-ress/electron,Floato/electron,saronwei/electron,takashi/electron,tylergib
son/electron,minggo/electron,egoist/electron,deed02392/electron,systembugtj/electron,destan/electron,smczk/electron,fffej/electron,biblerule/UMCTelnetHub,jacksondc/electron,Jonekee/electron,ervinb/electron,joaomoreno/atom-shell,gabriel/electron,mhkeller/electron,eriser/electron,mhkeller/electron,leethomas/electron,stevemao/electron,eriser/electron,bitemyapp/electron,nagyistoce/electron-atom-shell,michaelchiche/electron,JesselJohn/electron,GoooIce/electron,John-Lin/electron,dongjoon-hyun/electron,tinydew4/electron,noikiy/electron,Evercoder/electron,fritx/electron,natgolov/electron,biblerule/UMCTelnetHub,coderhaoxin/electron,jjz/electron,joneit/electron,dkfiresky/electron,vipulroxx/electron,tomashanacek/electron,mubassirhayat/electron,chrisswk/electron,wan-qy/electron,jonatasfreitasv/electron,xfstudio/electron,gstack/infinium-shell,fomojola/electron,JesselJohn/electron,mjaniszew/electron,egoist/electron,mattdesl/electron,joaomoreno/atom-shell,joaomoreno/atom-shell,trankmichael/electron,bruce/electron,d-salas/electron,iftekeriba/electron,bpasero/electron,RIAEvangelist/electron,pombredanne/electron,rprichard/electron,Neron-X5/electron,gerhardberger/electron,iftekeriba/electron
#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())

Use atom/atom-shell-frameworks until atom/atom-shell is public
#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell-frameworks/releases/download/v0.0.1'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())
<commit_before>#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())

<commit_msg>Use atom/atom-shell-frameworks until atom/atom-shell is public<commit_after>
#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell-frameworks/releases/download/v0.0.1'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())
#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())

Use atom/atom-shell-frameworks until atom/atom-shell is public

#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell-frameworks/releases/download/v0.0.1'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())
<commit_before>#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())

<commit_msg>Use atom/atom-shell-frameworks until atom/atom-shell is public<commit_after>
#!/usr/bin/env python

import sys
import os

from lib.util import safe_mkdir, extract_zip, tempdir, download

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
FRAMEWORKS_URL = 'https://github.com/atom/atom-shell-frameworks/releases/download/v0.0.1'

def main():
  os.chdir(SOURCE_ROOT)
  safe_mkdir('frameworks')
  download_and_unzip('Mantle')
  download_and_unzip('ReactiveCocoa')
  download_and_unzip('Squirrel')

def download_and_unzip(framework):
  zip_path = download_framework(framework)
  if zip_path:
    extract_zip(zip_path, 'frameworks')

def download_framework(framework):
  framework_path = os.path.join('frameworks', framework) + '.framework'
  if os.path.exists(framework_path):
    return
  filename = framework + '.framework.zip'
  url = FRAMEWORKS_URL + '/' + filename
  download_dir = tempdir(prefix='atom-shell-')
  path = os.path.join(download_dir, filename)
  download('Download ' + framework, url, path)
  return path

if __name__ == '__main__':
  sys.exit(main())
9445c23e70cabe519d51282bf4849a8d08e21039
robotpy_ext/misc/precise_delay.py
robotpy_ext/misc/precise_delay.py
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time to do a delay
            :type delay_period: float
        '''
        self.delay = wpilib.Timer.delay
        self.timer = wpilib.Timer()
        self.delay_period = delay_period
        self.timer.start()

    def wait(self):
        '''Waits until the delay period has passed'''
        # we must *always* yield here, so other things can run
        self.delay(0.001)
        while not self.timer.hasPeriodPassed(self.delay_period):
            self.delay(0.001)
import hal
import time
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop. Will delay precisely so that
        the next invocation of your loop happens at the same period, as
        long as your code does not run longer than the length of the delay.

        Our experience has shown that 25ms is a good loop period.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time (in seconds) to do a delay
            :type delay_period: float
        '''
        # The WPILib sleep/etc functions are slightly less stable as
        # they have more overhead, so only use them in simulation mode
        if hal.HALIsSimulation:
            self.delay = wpilib.Timer.delay
            self.get_now = wpilib.Timer.getFPGATimestamp
        else:
            self.delay = time.sleep
            self.get_now = time.time
        self.delay_period = float(delay_period)
        if self.delay_period < 0.001:
            raise ValueError("You probably don't want to delay less than 1ms!")
        self.next_delay = self.get_now() + self.delay_period

    def wait(self):
        '''Waits until the delay period has passed'''
        # optimization -- avoid local lookups
        delay = self.delay
        get_now = self.get_now
        next_delay = self.next_delay
        while True:
            # we must *always* yield here, so other things can run
            delay(0.0002)
            if next_delay < get_now():
                break
        self.next_delay += self.delay_period
Fix PreciseDelay to work properly
Fix PreciseDelay to work properly
Python
bsd-3-clause
robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time to do a delay
            :type delay_period: float
        '''
        self.delay = wpilib.Timer.delay
        self.timer = wpilib.Timer()
        self.delay_period = delay_period
        self.timer.start()

    def wait(self):
        '''Waits until the delay period has passed'''
        # we must *always* yield here, so other things can run
        self.delay(0.001)
        while not self.timer.hasPeriodPassed(self.delay_period):
            self.delay(0.001)

Fix PreciseDelay to work properly
import hal
import time
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop. Will delay precisely so that
        the next invocation of your loop happens at the same period, as
        long as your code does not run longer than the length of the delay.

        Our experience has shown that 25ms is a good loop period.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time (in seconds) to do a delay
            :type delay_period: float
        '''
        # The WPILib sleep/etc functions are slightly less stable as
        # they have more overhead, so only use them in simulation mode
        if hal.HALIsSimulation:
            self.delay = wpilib.Timer.delay
            self.get_now = wpilib.Timer.getFPGATimestamp
        else:
            self.delay = time.sleep
            self.get_now = time.time
        self.delay_period = float(delay_period)
        if self.delay_period < 0.001:
            raise ValueError("You probably don't want to delay less than 1ms!")
        self.next_delay = self.get_now() + self.delay_period

    def wait(self):
        '''Waits until the delay period has passed'''
        # optimization -- avoid local lookups
        delay = self.delay
        get_now = self.get_now
        next_delay = self.next_delay
        while True:
            # we must *always* yield here, so other things can run
            delay(0.0002)
            if next_delay < get_now():
                break
        self.next_delay += self.delay_period
<commit_before>
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time to do a delay
            :type delay_period: float
        '''
        self.delay = wpilib.Timer.delay
        self.timer = wpilib.Timer()
        self.delay_period = delay_period
        self.timer.start()

    def wait(self):
        '''Waits until the delay period has passed'''
        # we must *always* yield here, so other things can run
        self.delay(0.001)
        while not self.timer.hasPeriodPassed(self.delay_period):
            self.delay(0.001)
<commit_msg>Fix PreciseDelay to work properly<commit_after>
import hal
import time
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop. Will delay precisely so that
        the next invocation of your loop happens at the same period, as
        long as your code does not run longer than the length of the delay.

        Our experience has shown that 25ms is a good loop period.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time (in seconds) to do a delay
            :type delay_period: float
        '''
        # The WPILib sleep/etc functions are slightly less stable as
        # they have more overhead, so only use them in simulation mode
        if hal.HALIsSimulation:
            self.delay = wpilib.Timer.delay
            self.get_now = wpilib.Timer.getFPGATimestamp
        else:
            self.delay = time.sleep
            self.get_now = time.time
        self.delay_period = float(delay_period)
        if self.delay_period < 0.001:
            raise ValueError("You probably don't want to delay less than 1ms!")
        self.next_delay = self.get_now() + self.delay_period

    def wait(self):
        '''Waits until the delay period has passed'''
        # optimization -- avoid local lookups
        delay = self.delay
        get_now = self.get_now
        next_delay = self.next_delay
        while True:
            # we must *always* yield here, so other things can run
            delay(0.0002)
            if next_delay < get_now():
                break
        self.next_delay += self.delay_period
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time to do a delay
            :type delay_period: float
        '''
        self.delay = wpilib.Timer.delay
        self.timer = wpilib.Timer()
        self.delay_period = delay_period
        self.timer.start()

    def wait(self):
        '''Waits until the delay period has passed'''
        # we must *always* yield here, so other things can run
        self.delay(0.001)
        while not self.timer.hasPeriodPassed(self.delay_period):
            self.delay(0.001)

Fix PreciseDelay to work properly

import hal
import time
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop. Will delay precisely so that
        the next invocation of your loop happens at the same period, as
        long as your code does not run longer than the length of the delay.

        Our experience has shown that 25ms is a good loop period.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time (in seconds) to do a delay
            :type delay_period: float
        '''
        # The WPILib sleep/etc functions are slightly less stable as
        # they have more overhead, so only use them in simulation mode
        if hal.HALIsSimulation:
            self.delay = wpilib.Timer.delay
            self.get_now = wpilib.Timer.getFPGATimestamp
        else:
            self.delay = time.sleep
            self.get_now = time.time
        self.delay_period = float(delay_period)
        if self.delay_period < 0.001:
            raise ValueError("You probably don't want to delay less than 1ms!")
        self.next_delay = self.get_now() + self.delay_period

    def wait(self):
        '''Waits until the delay period has passed'''
        # optimization -- avoid local lookups
        delay = self.delay
        get_now = self.get_now
        next_delay = self.next_delay
        while True:
            # we must *always* yield here, so other things can run
            delay(0.0002)
            if next_delay < get_now():
                break
        self.next_delay += self.delay_period
<commit_before>
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time to do a delay
            :type delay_period: float
        '''
        self.delay = wpilib.Timer.delay
        self.timer = wpilib.Timer()
        self.delay_period = delay_period
        self.timer.start()

    def wait(self):
        '''Waits until the delay period has passed'''
        # we must *always* yield here, so other things can run
        self.delay(0.001)
        while not self.timer.hasPeriodPassed(self.delay_period):
            self.delay(0.001)
<commit_msg>Fix PreciseDelay to work properly<commit_after>
import hal
import time
import wpilib

class PreciseDelay:
    '''
        Used to synchronize a timing loop. Will delay precisely so that
        the next invocation of your loop happens at the same period, as
        long as your code does not run longer than the length of the delay.

        Our experience has shown that 25ms is a good loop period.

        Usage::

            delay = PreciseDelay(time_to_delay)

            while something:
                # do things here
                delay.wait()
    '''

    def __init__(self, delay_period):
        '''
            :param delay_period: The amount of time (in seconds) to do a delay
            :type delay_period: float
        '''
        # The WPILib sleep/etc functions are slightly less stable as
        # they have more overhead, so only use them in simulation mode
        if hal.HALIsSimulation:
            self.delay = wpilib.Timer.delay
            self.get_now = wpilib.Timer.getFPGATimestamp
        else:
            self.delay = time.sleep
            self.get_now = time.time
        self.delay_period = float(delay_period)
        if self.delay_period < 0.001:
            raise ValueError("You probably don't want to delay less than 1ms!")
        self.next_delay = self.get_now() + self.delay_period

    def wait(self):
        '''Waits until the delay period has passed'''
        # optimization -- avoid local lookups
        delay = self.delay
        get_now = self.get_now
        next_delay = self.next_delay
        while True:
            # we must *always* yield here, so other things can run
            delay(0.0002)
            if next_delay < get_now():
                break
        self.next_delay += self.delay_period
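Note on the PreciseDelay record above: in the committed new version, `if hal.HALIsSimulation:` tests the attribute itself rather than its return value; if `hal.HALIsSimulation` is a function (as the HAL bindings suggest), the condition is always truthy and the WPILib branch runs even on real hardware. A minimal sketch of the presumably intended check -- hypothetical, not part of the recorded commit:

import hal
import time
import wpilib

# Calling the predicate selects the timing helpers by actual mode;
# without the parentheses the simulation branch would always be taken.
if hal.HALIsSimulation():
    delay = wpilib.Timer.delay              # simulation: WPILib helpers
    get_now = wpilib.Timer.getFPGATimestamp
else:
    delay = time.sleep                      # real hardware: stdlib timing
    get_now = time.time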
e4fcebfe4e87b57ae8505437f54c69f3afd59c04
python/tests.py
python/tests.py
#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()
#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column
    rho /= 1.e3; rho *= 1./1e6 # Convert to SI units [kg m^-3]

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()
Fix test to take care of units.
TODO: Fix test to take care of units.
Python
mit
PaulMag/AST3310-Prj01,PaulMag/AST3310-Prj01
#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()

TODO: Fix test to take care of units.
#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column
    rho /= 1.e3; rho *= 1./1e6 # Convert to SI units [kg m^-3]

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()
<commit_before>#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()

<commit_msg>TODO: Fix test to take care of units.<commit_after>
#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column
    rho /= 1.e3; rho *= 1./1e6 # Convert to SI units [kg m^-3]

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()
#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()

TODO: Fix test to take care of units.

#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column
    rho /= 1.e3; rho *= 1./1e6 # Convert to SI units [kg m^-3]

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()
<commit_before>#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()

<commit_msg>TODO: Fix test to take care of units.<commit_after>
#!/usr/bin/env python
"""
Created on Thu 6 March 2014

Contains testing routines for `SolarCoreModel.py`.

@author Kristoffer Braekken
"""
import SolarCoreModel

from numpy import log10

def opacity_test(tol=1.e-10):
    """
    Function for testing that the opacity is fetched correctly.
    """
    # Test values
    T = 10**(5.) # Feth 5.00 row
    rho = 1.e-6 # Fetch -5.0 column
    rho /= 1.e3; rho *= 1./1e6 # Convert to SI units [kg m^-3]

    ans = log10(SolarCoreModel.kappa(T, rho))

    if abs(ans - (-0.068)) < tol:
        print 'Sucess.'
    else:
        print 'Fail.\n10**kappa =', ans, 'and not -0.068.'

if __name__ == '__main__':
    opacity_test()
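Note on the SolarCoreModel record above: the commit message is an explicit TODO, and the inserted line `rho /= 1.e3; rho *= 1./1e6` scales the density by 1e-9 overall. If the intent is to convert a cgs density (g cm^-3) to SI (kg m^-3), the factor should instead be 1e3, since 1 g = 1e-3 kg and 1 cm^-3 = 1e6 m^-3. A minimal sketch under that assumption (the cgs interpretation of the table value is an assumption, not stated in the record):

# Hypothetical sketch, assuming the opacity table lists rho in g cm^-3:
# 1 g cm^-3 = (1e-3 kg) * (1e6 m^-3) = 1e3 kg m^-3
rho_cgs = 1.e-6
rho_si = rho_cgs * 1.e3  # density in kg m^-3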
09a6e2528f062581c90ed3f3225f19b36f0ac0f9
eve_api/forms.py
eve_api/forms.py
import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            return self.cleaned_data
        else:
            raise forms.ValidationError("This API User ID is already registered")
import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")
        return self.cleaned_data['api_key']

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            pass
        else:
            raise forms.ValidationError("This API User ID is already registered")
        return self.cleaned_data['user_id']
Fix the validation data on the EVEAPIForm
Fix the validation data on the EVEAPIForm
Python
bsd-3-clause
nikdoof/test-auth
import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            return self.cleaned_data
        else:
            raise forms.ValidationError("This API User ID is already registered")

Fix the validation data on the EVEAPIForm
import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")
        return self.cleaned_data['api_key']

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            pass
        else:
            raise forms.ValidationError("This API User ID is already registered")
        return self.cleaned_data['user_id']
<commit_before>import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            return self.cleaned_data
        else:
            raise forms.ValidationError("This API User ID is already registered")

<commit_msg>Fix the validation data on the EVEAPIForm<commit_after>
import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")
        return self.cleaned_data['api_key']

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            pass
        else:
            raise forms.ValidationError("This API User ID is already registered")
        return self.cleaned_data['user_id']
import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            return self.cleaned_data
        else:
            raise forms.ValidationError("This API User ID is already registered")

Fix the validation data on the EVEAPIForm

import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")
        return self.cleaned_data['api_key']

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            pass
        else:
            raise forms.ValidationError("This API User ID is already registered")
        return self.cleaned_data['user_id']
<commit_before>import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            return self.cleaned_data
        else:
            raise forms.ValidationError("This API User ID is already registered")

<commit_msg>Fix the validation data on the EVEAPIForm<commit_after>
import re
from django import forms
from eve_api.models import EVEAccount, EVEPlayerCharacter, EVEPlayerCorporation

class EveAPIForm(forms.Form):
    """ EVE API input form """

    user_id = forms.IntegerField(label=u'User ID')
    api_key = forms.CharField(label=u'API Key', max_length=64)
    description = forms.CharField(max_length=100, required=False)

    def clean_api_key(self):
        if not len(self.cleaned_data['api_key']) == 64:
            raise forms.ValidationError("Provided API Key is not 64 characters long.")
        if re.search(r'[^\.a-zA-Z0-9]', self.cleaned_data['api_key']):
            raise forms.ValidationError("Provided API Key has invalid characters.")
        return self.cleaned_data['api_key']

    def clean_user_id(self):
        if not 'user_id' in self.cleaned_data or self.cleaned_data['user_id'] == '':
            raise forms.ValidationError("Please provide a valid User ID")
        try:
            eaccount = EVEAccount.objects.get(api_user_id=self.cleaned_data['user_id'])
        except EVEAccount.DoesNotExist:
            pass
        else:
            raise forms.ValidationError("This API User ID is already registered")
        return self.cleaned_data['user_id']
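Note on the EveAPIForm record above: the fix illustrates the Django `clean_<field>` contract -- each per-field clean method must return the cleaned value, which Django stores back into `cleaned_data`. The old version returned `None` from `clean_api_key` (silently blanking the key) and the whole `cleaned_data` dict from `clean_user_id`. A minimal sketch of the pattern; `ExampleForm` is hypothetical, and the `exists()` lookup is an illustrative alternative to the record's try/except:

from django import forms
from eve_api.models import EVEAccount  # model taken from the record above

class ExampleForm(forms.Form):
    user_id = forms.IntegerField(label=u'User ID')

    def clean_user_id(self):
        user_id = self.cleaned_data['user_id']
        # Reject IDs that are already registered.
        if EVEAccount.objects.filter(api_user_id=user_id).exists():
            raise forms.ValidationError("This API User ID is already registered")
        # Must return the value, or cleaned_data['user_id'] becomes None.
        return user_id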
216f128bb8baf65a06c1f35356ab0f7fe50db967
telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py
telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import benchmark
from telemetry.unittest import tab_test_case

class InspectorMemoryTest(tab_test_case.TabTestCase):

  @benchmark.Enabled('has tabs')
  def testGetDOMStats(self):
    # Due to an issue with CrOS, we create a new tab here rather than
    # using the existing tab to get a consistent starting page on all platforms.
    self._tab = self._browser.tabs.New()

    self.Navigate('dom_counter_sample.html')

    counts = self._tab.dom_stats
    self.assertEqual(counts['document_count'], 1)
    self.assertEqual(counts['node_count'], 14)
    self.assertEqual(counts['event_listener_count'], 2)
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import benchmark
from telemetry.unittest import tab_test_case

class InspectorMemoryTest(tab_test_case.TabTestCase):

  @benchmark.Enabled('has tabs')
  def testGetDOMStats(self):
    # Due to an issue with CrOS, we create a new tab here rather than
    # using the existing tab to get a consistent starting page on all platforms.
    self._tab = self._browser.tabs.New()

    self.Navigate('dom_counter_sample.html')

    # Document_count > 1 indicates that WebCore::Document loaded in Chrome
    # is leaking! The baseline should exactly match the numbers on:
    # unittest_data/dom_counter_sample.html
    # Please contact kouhei@, hajimehoshi@ when rebaselining.
    counts = self._tab.dom_stats
    self.assertEqual(counts['document_count'], 1,
                     'Document leak is detected! '+
                     'The previous document is likely retained unexpectedly.')
    self.assertEqual(counts['node_count'], 14, 'Node leak is detected!')
    self.assertEqual(counts['event_listener_count'], 2,
                     'EventListener leak is detected!')
Add warnings to inspector DOM count unittest baselines.
Add warnings to inspector DOM count unittest baselines.

The unit test failure indicates a serious Document leak,
where all WebCore::Document loaded in Chrome is leaking.
This CL adds warning comments to the baseline to avoid regressions.

BUG=392121
NOTRY=true

Review URL: https://codereview.chromium.org/393123003

git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@284653 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1) self.assertEqual(counts['node_count'], 14) self.assertEqual(counts['event_listener_count'], 2) Add warnings to inspector DOM count unittest baselines. The unit test failure indicates a serious Document leak, where all WebCore::Document loaded in Chrome is leaking. This CL adds warning comments to the baseline to avoid regressions. BUG=392121 NOTRY=true Review URL: https://codereview.chromium.org/393123003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@284653 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') # Document_count > 1 indicates that WebCore::Document loaded in Chrome # is leaking! The baseline should exactly match the numbers on: # unittest_data/dom_counter_sample.html # Please contact kouhei@, hajimehoshi@ when rebaselining. counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1, 'Document leak is detected! '+ 'The previous document is likely retained unexpectedly.') self.assertEqual(counts['node_count'], 14, 'Node leak is detected!') self.assertEqual(counts['event_listener_count'], 2, 'EventListener leak is detected!')
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1) self.assertEqual(counts['node_count'], 14) self.assertEqual(counts['event_listener_count'], 2) <commit_msg>Add warnings to inspector DOM count unittest baselines. The unit test failure indicates a serious Document leak, where all WebCore::Document loaded in Chrome is leaking. This CL adds warning comments to the baseline to avoid regressions. BUG=392121 NOTRY=true Review URL: https://codereview.chromium.org/393123003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@284653 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') # Document_count > 1 indicates that WebCore::Document loaded in Chrome # is leaking! The baseline should exactly match the numbers on: # unittest_data/dom_counter_sample.html # Please contact kouhei@, hajimehoshi@ when rebaselining. counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1, 'Document leak is detected! '+ 'The previous document is likely retained unexpectedly.') self.assertEqual(counts['node_count'], 14, 'Node leak is detected!') self.assertEqual(counts['event_listener_count'], 2, 'EventListener leak is detected!')
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1) self.assertEqual(counts['node_count'], 14) self.assertEqual(counts['event_listener_count'], 2) Add warnings to inspector DOM count unittest baselines. The unit test failure indicates a serious Document leak, where all WebCore::Document loaded in Chrome is leaking. This CL adds warning comments to the baseline to avoid regressions. BUG=392121 NOTRY=true Review URL: https://codereview.chromium.org/393123003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@284653 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') # Document_count > 1 indicates that WebCore::Document loaded in Chrome # is leaking! The baseline should exactly match the numbers on: # unittest_data/dom_counter_sample.html # Please contact kouhei@, hajimehoshi@ when rebaselining. counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1, 'Document leak is detected! '+ 'The previous document is likely retained unexpectedly.') self.assertEqual(counts['node_count'], 14, 'Node leak is detected!') self.assertEqual(counts['event_listener_count'], 2, 'EventListener leak is detected!')
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1) self.assertEqual(counts['node_count'], 14) self.assertEqual(counts['event_listener_count'], 2) <commit_msg>Add warnings to inspector DOM count unittest baselines. The unit test failure indicates a serious Document leak, where all WebCore::Document loaded in Chrome is leaking. This CL adds warning comments to the baseline to avoid regressions. BUG=392121 NOTRY=true Review URL: https://codereview.chromium.org/393123003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@284653 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import benchmark from telemetry.unittest import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): @benchmark.Enabled('has tabs') def testGetDOMStats(self): # Due to an issue with CrOS, we create a new tab here rather than # using the existing tab to get a consistent starting page on all platforms. self._tab = self._browser.tabs.New() self.Navigate('dom_counter_sample.html') # Document_count > 1 indicates that WebCore::Document loaded in Chrome # is leaking! The baseline should exactly match the numbers on: # unittest_data/dom_counter_sample.html # Please contact kouhei@, hajimehoshi@ when rebaselining. counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1, 'Document leak is detected! '+ 'The previous document is likely retained unexpectedly.') self.assertEqual(counts['node_count'], 14, 'Node leak is detected!') self.assertEqual(counts['event_listener_count'], 2, 'EventListener leak is detected!')
c721ba7badc0b980d9c58822b5c0b626b1321f1a
grokapi/cli.py
grokapi/cli.py
# -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=True, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", default="en", required=True, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", default="en", required=True, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main()
# -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=False, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", required=False, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", required=False, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main()
Fix default values of Argument Parser
Fix default values of Argument Parser
Python
mit
Commonists/Grokapi
# -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=True, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", default="en", required=True, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", default="en", required=True, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main() Fix default values of Argument Parser
# -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=False, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", required=False, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", required=False, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main()
<commit_before># -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=True, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", default="en", required=True, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", default="en", required=True, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main() <commit_msg>Fix default values of Argument Parser<commit_after>
# -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=False, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", required=False, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", required=False, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main()
# -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=True, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", default="en", required=True, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", default="en", required=True, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main() Fix default values of Argument Parser# -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=False, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", required=False, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", required=False, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main()
<commit_before># -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=True, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", default="en", required=True, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", default="en", required=True, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main() <commit_msg>Fix default values of Argument Parser<commit_after># -*- coding: utf-8 -*- from queries import Grok def print_monthly_views(site, pages, year, month): grok = Grok(site) for page in pages: result = grok.get_views_for_month(page, year, month) print result['daily_views'] def main(): """ main script. """ from argparse import ArgumentParser description = 'Extract traffic statistics of Wikipedia articles.' parser = ArgumentParser(description=description) parser.add_argument("-l", "--lang", type=str, dest="lang", default="en", required=False, help="Language code for Wikipedia") parser.add_argument("-y", "--year", type=int, dest="year", required=False, help="Year") parser.add_argument("-m", "--month", type=int, dest="month", required=False, help="Month") parser.add_argument("page", nargs='*', metavar="PAGE", help='A list of pages') args = parser.parse_args() print_monthly_views(args.lang, args.page, args.year, args.month) if __name__ == '__main__': main()
73b7da1a0360f50e660e1983ec02dd5225bde3a3
mitmproxy/platform/__init__.py
mitmproxy/platform/__init__.py
import sys resolver = None if sys.platform == "linux2": from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver
import sys import re resolver = None if re.match(r"linux(?:2)?", sys.platform): from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver
Fix platform import on Linux using python3
Fix platform import on Linux using python3 Using python3, sys.platform returns "linux" instead of "linux2" using python2. This patch accepts "linux" as well as "linux2".
Python
mit
mosajjal/mitmproxy,vhaupert/mitmproxy,laurmurclar/mitmproxy,Kriechi/mitmproxy,dwfreed/mitmproxy,xaxa89/mitmproxy,ujjwal96/mitmproxy,mosajjal/mitmproxy,Kriechi/mitmproxy,mitmproxy/mitmproxy,mitmproxy/mitmproxy,laurmurclar/mitmproxy,ujjwal96/mitmproxy,vhaupert/mitmproxy,zlorb/mitmproxy,StevenVanAcker/mitmproxy,Kriechi/mitmproxy,ddworken/mitmproxy,mitmproxy/mitmproxy,mosajjal/mitmproxy,ujjwal96/mitmproxy,mhils/mitmproxy,mosajjal/mitmproxy,StevenVanAcker/mitmproxy,xaxa89/mitmproxy,cortesi/mitmproxy,StevenVanAcker/mitmproxy,zlorb/mitmproxy,MatthewShao/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,zlorb/mitmproxy,mitmproxy/mitmproxy,dwfreed/mitmproxy,MatthewShao/mitmproxy,jvillacorta/mitmproxy,mhils/mitmproxy,MatthewShao/mitmproxy,Kriechi/mitmproxy,xaxa89/mitmproxy,ddworken/mitmproxy,mitmproxy/mitmproxy,dwfreed/mitmproxy,ddworken/mitmproxy,cortesi/mitmproxy,laurmurclar/mitmproxy,cortesi/mitmproxy,jvillacorta/mitmproxy,dwfreed/mitmproxy,MatthewShao/mitmproxy,mhils/mitmproxy,ujjwal96/mitmproxy,mhils/mitmproxy,vhaupert/mitmproxy,ddworken/mitmproxy,xaxa89/mitmproxy,vhaupert/mitmproxy,cortesi/mitmproxy,jvillacorta/mitmproxy,jvillacorta/mitmproxy,zlorb/mitmproxy,laurmurclar/mitmproxy
import sys resolver = None if sys.platform == "linux2": from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver Fix platform import on Linux using python3 Using python3, sys.platform returns "linux" instead of "linux2" using python2. This patch accepts "linux" as well as "linux2".
import sys import re resolver = None if re.match(r"linux(?:2)?", sys.platform): from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver
<commit_before>import sys resolver = None if sys.platform == "linux2": from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver <commit_msg>Fix platform import on Linux using python3 Using python3, sys.platform returns "linux" instead of "linux2" using python2. This patch accepts "linux" as well as "linux2".<commit_after>
import sys import re resolver = None if re.match(r"linux(?:2)?", sys.platform): from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver
import sys resolver = None if sys.platform == "linux2": from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver Fix platform import on Linux using python3 Using python3, sys.platform returns "linux" instead of "linux2" using python2. This patch accepts "linux" as well as "linux2".import sys import re resolver = None if re.match(r"linux(?:2)?", sys.platform): from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver
<commit_before>import sys resolver = None if sys.platform == "linux2": from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver <commit_msg>Fix platform import on Linux using python3 Using python3, sys.platform returns "linux" instead of "linux2" using python2. This patch accepts "linux" as well as "linux2".<commit_after>import sys import re resolver = None if re.match(r"linux(?:2)?", sys.platform): from . import linux resolver = linux.Resolver elif sys.platform == "darwin": from . import osx resolver = osx.Resolver elif sys.platform.startswith("freebsd"): from . import osx resolver = osx.Resolver elif sys.platform == "win32": from . import windows resolver = windows.Resolver
b19b3d1e3433465e6e05a9b50d79206b7049cbf6
lib/windspharm/__init__.py
lib/windspharm/__init__.py
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.0' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.x' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
Reset version number for maintenance branch.
Reset version number for maintenance branch.
Python
mit
nicolasfauchereau/windspharm,ajdawson/windspharm
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.0' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass Reset version number for maintenance branch.
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.x' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
<commit_before>"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.0' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass <commit_msg>Reset version number for maintenance branch.<commit_after>
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.x' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.0' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass Reset version number for maintenance branch."""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.x' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
<commit_before>"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.0' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass <commit_msg>Reset version number for maintenance branch.<commit_after>"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.x' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
4b855e62bd4f92c7aa9b2614cb6eb57e112d7db6
reclass/__init__.py
reclass/__init__.py
# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if node is False: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print)
# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if not node: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print)
Allow node to be None to trigger inventory
Allow node to be None to trigger inventory Signed-off-by: martin f. krafft <acc3492a66a5949176a2fc8886cf441478ca46a1@madduck.net>
Python
artistic-2.0
madduck/reclass,rmoorman/reclass,jeroen92/reclass,michaelkuty/reclass,jeroen92/reclass,rmoorman/reclass
# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if node is False: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print) Allow node to be None to trigger inventory Signed-off-by: martin f. krafft <acc3492a66a5949176a2fc8886cf441478ca46a1@madduck.net>
# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if not node: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print)
<commit_before># # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if node is False: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print) <commit_msg>Allow node to be None to trigger inventory Signed-off-by: martin f. krafft <acc3492a66a5949176a2fc8886cf441478ca46a1@madduck.net><commit_after>
# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if not node: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print)
# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if node is False: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print) Allow node to be None to trigger inventory Signed-off-by: martin f. krafft <acc3492a66a5949176a2fc8886cf441478ca46a1@madduck.net># # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if not node: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print)
<commit_before># # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if node is False: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print) <commit_msg>Allow node to be None to trigger inventory Signed-off-by: martin f. krafft <acc3492a66a5949176a2fc8886cf441478ca46a1@madduck.net><commit_after># # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft <madduck@madduck.net> # Released under the terms of the Artistic Licence 2.0 # from output import OutputLoader from storage import StorageBackendLoader def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node): storage_class = StorageBackendLoader(storage_type).load() storage = storage_class(nodes_uri, classes_uri, applications_postfix) if not node: ret = storage.inventory() else: ret = storage.nodeinfo(node) return ret def output(data, fmt, pretty_print=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print)
d43a08706f3072a0b97d01526ffd0de0d4a4110c
niworkflows/conftest.py
niworkflows/conftest.py
"""py.test configuration""" import os from pathlib import Path import numpy import pytest from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = numpy doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data @pytest.fixture def testdata_dir(): return data_dir
"""py.test configuration""" import os from pathlib import Path import numpy as np import nibabel as nb import pytest import tempfile from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = np doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data tmpdir = tempfile.TemporaryDirectory() nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz') nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname) doctest_namespace['nifti_fname'] = nifti_fname yield tmpdir.cleanup() @pytest.fixture def testdata_dir(): return data_dir
Make nifti_fname available to doctests
DOCTEST: Make nifti_fname available to doctests
Python
apache-2.0
oesteban/niworkflows,oesteban/niworkflows,poldracklab/niworkflows,oesteban/niworkflows,poldracklab/niworkflows
"""py.test configuration""" import os from pathlib import Path import numpy import pytest from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = numpy doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data @pytest.fixture def testdata_dir(): return data_dir DOCTEST: Make nifti_fname available to doctests
"""py.test configuration""" import os from pathlib import Path import numpy as np import nibabel as nb import pytest import tempfile from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = np doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data tmpdir = tempfile.TemporaryDirectory() nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz') nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname) doctest_namespace['nifti_fname'] = nifti_fname yield tmpdir.cleanup() @pytest.fixture def testdata_dir(): return data_dir
<commit_before>"""py.test configuration""" import os from pathlib import Path import numpy import pytest from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = numpy doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data @pytest.fixture def testdata_dir(): return data_dir <commit_msg>DOCTEST: Make nifti_fname available to doctests<commit_after>
"""py.test configuration""" import os from pathlib import Path import numpy as np import nibabel as nb import pytest import tempfile from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = np doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data tmpdir = tempfile.TemporaryDirectory() nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz') nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname) doctest_namespace['nifti_fname'] = nifti_fname yield tmpdir.cleanup() @pytest.fixture def testdata_dir(): return data_dir
"""py.test configuration""" import os from pathlib import Path import numpy import pytest from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = numpy doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data @pytest.fixture def testdata_dir(): return data_dir DOCTEST: Make nifti_fname available to doctests"""py.test configuration""" import os from pathlib import Path import numpy as np import nibabel as nb import pytest import tempfile from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = np doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data tmpdir = tempfile.TemporaryDirectory() nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz') nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname) doctest_namespace['nifti_fname'] = nifti_fname yield tmpdir.cleanup() @pytest.fixture def testdata_dir(): return data_dir
<commit_before>"""py.test configuration""" import os from pathlib import Path import numpy import pytest from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = numpy doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data @pytest.fixture def testdata_dir(): return data_dir <commit_msg>DOCTEST: Make nifti_fname available to doctests<commit_after>"""py.test configuration""" import os from pathlib import Path import numpy as np import nibabel as nb import pytest import tempfile from .utils.bids import collect_data test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / '.cache' / 'stanford-crn')) data_dir = Path(test_data_env) / 'BIDS-examples-1-enh-ds054' @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace['np'] = np doctest_namespace['os'] = os doctest_namespace['Path'] = Path doctest_namespace['datadir'] = data_dir doctest_namespace['bids_collect_data'] = collect_data tmpdir = tempfile.TemporaryDirectory() nifti_fname = str(Path(tmpdir.name) / 'test.nii.gz') nb.Nifti1Image(np.random.random((5, 5)).astype('f4'), np.eye(4)).to_filename(nifti_fname) doctest_namespace['nifti_fname'] = nifti_fname yield tmpdir.cleanup() @pytest.fixture def testdata_dir(): return data_dir
e8afa1408618d7dc4e39b84963199dd87c217ef9
app/main/views/buyers.py
app/main/views/buyers.py
from flask import render_template, request, flash from flask_login import login_required from .. import main from ... import data_api_client from ..auth import role_required @main.route('/buyers', methods=['GET']) @login_required @role_required('admin') def find_buyer_by_brief_id(): brief_id = request.args.get('brief_id') try: brief = data_api_client.get_brief(brief_id).get('briefs') except: flash('no_brief', 'error') return render_template( "view_buyers.html", users=list(), title=None, brief_id=brief_id ), 404 users = brief.get('users') title = brief.get('title') return render_template( "view_buyers.html", users=users, title=title, brief_id=brief_id )
from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
Remove unnecessary variable from route
Remove unnecessary variable from route

Jinja will set any variable it can't find to None, so the title variable is unnecessary.
Python
mit
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            title=None,
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )

Remove unnecessary variable from route

Jinja will set any variable it can't find to None, so the title variable is unnecessary.
from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
<commit_before>from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            title=None,
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
<commit_msg>Remove unnecessary variable from route

Jinja will set any variable it can't find to None, so the title variable is unnecessary.<commit_after>
from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            title=None,
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )

Remove unnecessary variable from route

Jinja will set any variable it can't find to None, so the title variable is unnecessary.

from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
<commit_before>from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            title=None,
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
<commit_msg>Remove unnecessary variable from route

Jinja will set any variable it can't find to None, so the title variable is unnecessary.<commit_after>from flask import render_template, request, flash
from flask_login import login_required

from .. import main
from ... import data_api_client
from ..auth import role_required


@main.route('/buyers', methods=['GET'])
@login_required
@role_required('admin')
def find_buyer_by_brief_id():
    brief_id = request.args.get('brief_id')

    try:
        brief = data_api_client.get_brief(brief_id).get('briefs')
    except:
        flash('no_brief', 'error')
        return render_template(
            "view_buyers.html",
            users=list(),
            brief_id=brief_id
        ), 404

    users = brief.get('users')
    title = brief.get('title')

    return render_template(
        "view_buyers.html",
        users=users,
        title=title,
        brief_id=brief_id
    )
c24bd93a88a3998ac306a82f7c74b0a782aa8e04
bokeh/application.py
bokeh/application.py
from __future__ import absolute_import

from .document import Document


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
from __future__ import absolute_import

from .document import Document

import logging
log = logging.getLogger(__name__)


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
            if h.failed:
                # TODO (havocp) this could be formatted a lot more nicely
                log.error("Error running application handler %r: %r %r ", h, h.error, h.error_detail)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
Add basic reporting of handler errors
Add basic reporting of handler errors

This isn't very nice but it's very helpful for debugging :-) Improve it later.
Python
bsd-3-clause
azjps/bokeh,timsnyder/bokeh,quasiben/bokeh,timsnyder/bokeh,azjps/bokeh,philippjfr/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,justacec/bokeh,philippjfr/bokeh,rs2/bokeh,phobson/bokeh,ericmjl/bokeh,stonebig/bokeh,htygithub/bokeh,phobson/bokeh,stonebig/bokeh,Karel-van-de-Plassche/bokeh,DuCorey/bokeh,gpfreitas/bokeh,quasiben/bokeh,DuCorey/bokeh,ericmjl/bokeh,aavanian/bokeh,dennisobrien/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,phobson/bokeh,aiguofer/bokeh,draperjames/bokeh,gpfreitas/bokeh,timsnyder/bokeh,DuCorey/bokeh,percyfal/bokeh,timsnyder/bokeh,maxalbert/bokeh,mindriot101/bokeh,msarahan/bokeh,clairetang6/bokeh,dennisobrien/bokeh,htygithub/bokeh,rs2/bokeh,maxalbert/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,gpfreitas/bokeh,aiguofer/bokeh,mindriot101/bokeh,aiguofer/bokeh,ptitjano/bokeh,bokeh/bokeh,htygithub/bokeh,clairetang6/bokeh,clairetang6/bokeh,draperjames/bokeh,azjps/bokeh,aavanian/bokeh,htygithub/bokeh,KasperPRasmussen/bokeh,percyfal/bokeh,bokeh/bokeh,stonebig/bokeh,philippjfr/bokeh,schoolie/bokeh,dennisobrien/bokeh,schoolie/bokeh,draperjames/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,aavanian/bokeh,bokeh/bokeh,maxalbert/bokeh,Karel-van-de-Plassche/bokeh,bokeh/bokeh,ericmjl/bokeh,msarahan/bokeh,justacec/bokeh,aavanian/bokeh,ericmjl/bokeh,bokeh/bokeh,percyfal/bokeh,aavanian/bokeh,quasiben/bokeh,jakirkham/bokeh,gpfreitas/bokeh,ptitjano/bokeh,draperjames/bokeh,schoolie/bokeh,Karel-van-de-Plassche/bokeh,clairetang6/bokeh,dennisobrien/bokeh,msarahan/bokeh,ptitjano/bokeh,rs2/bokeh,rs2/bokeh,schoolie/bokeh,timsnyder/bokeh,mindriot101/bokeh,dennisobrien/bokeh,philippjfr/bokeh,ptitjano/bokeh,percyfal/bokeh,stonebig/bokeh,maxalbert/bokeh,justacec/bokeh,jakirkham/bokeh,aiguofer/bokeh,aiguofer/bokeh,jakirkham/bokeh,ptitjano/bokeh,KasperPRasmussen/bokeh,schoolie/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,percyfal/bokeh,rs2/bokeh,phobson/bokeh,DuCorey/bokeh,phobson/bokeh,msarahan/bokeh,DuCorey/bokeh,ericmjl/bokeh,mindriot101/bokeh,philippjfr/bokeh
from __future__ import absolute_import

from .document import Document


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)

Add basic reporting of handler errors

This isn't very nice but it's very helpful for debugging :-) Improve it later.
from __future__ import absolute_import

from .document import Document

import logging
log = logging.getLogger(__name__)


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
            if h.failed:
                # TODO (havocp) this could be formatted a lot more nicely
                log.error("Error running application handler %r: %r %r ", h, h.error, h.error_detail)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
<commit_before>from __future__ import absolute_import

from .document import Document


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
<commit_msg>Add basic reporting of handler errors

This isn't very nice but it's very helpful for debugging :-) Improve it later.<commit_after>
from __future__ import absolute_import

from .document import Document

import logging
log = logging.getLogger(__name__)


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
            if h.failed:
                # TODO (havocp) this could be formatted a lot more nicely
                log.error("Error running application handler %r: %r %r ", h, h.error, h.error_detail)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
from __future__ import absolute_import

from .document import Document


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)

Add basic reporting of handler errors

This isn't very nice but it's very helpful for debugging :-) Improve it later.

from __future__ import absolute_import

from .document import Document

import logging
log = logging.getLogger(__name__)


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
            if h.failed:
                # TODO (havocp) this could be formatted a lot more nicely
                log.error("Error running application handler %r: %r %r ", h, h.error, h.error_detail)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
<commit_before>from __future__ import absolute_import

from .document import Document


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
<commit_msg>Add basic reporting of handler errors

This isn't very nice but it's very helpful for debugging :-) Improve it later.<commit_after>from __future__ import absolute_import

from .document import Document

import logging
log = logging.getLogger(__name__)


class Application(object):
    """An Application is a factory for Document instances"""

    def __init__(self):
        self._handlers = []

    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        """Loads a new document using the Application's handlers to fill it in."""
        doc = Document()
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            h.modify_document(doc)
            if h.failed:
                # TODO (havocp) this could be formatted a lot more nicely
                log.error("Error running application handler %r: %r %r ", h, h.error, h.error_detail)
        return doc

    def add(self, handler):
        """Add a handler to the pipeline used to initialize new documents."""
        self._handlers.append(handler)
7f08e4c9fd370e375ad8e174a98478c0281ecb6e
tools/manifest/tool.py
tools/manifest/tool.py
import os
import time
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (time.asctime(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
import os
import datetime
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (datetime.datetime.now().isoformat(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
Use isoformat in datetime logs, rather than asctime
Use isoformat in datetime logs, rather than asctime

Change-Id: Ic11a70e28288517b6f174d7066f71a12efd5f4f1
Python
mit
wikimedia/operations-software-tools-manifest
import os
import time
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (time.asctime(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')

Use isoformat in datetime logs, rather than asctime

Change-Id: Ic11a70e28288517b6f174d7066f71a12efd5f4f1
import os
import datetime
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (datetime.datetime.now().isoformat(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
<commit_before>import os
import time
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (time.asctime(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
<commit_msg>Use isoformat in datetime logs, rather than asctime

Change-Id: Ic11a70e28288517b6f174d7066f71a12efd5f4f1<commit_after>
import os
import datetime
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (datetime.datetime.now().isoformat(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
import os
import time
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (time.asctime(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')

Use isoformat in datetime logs, rather than asctime

Change-Id: Ic11a70e28288517b6f174d7066f71a12efd5f4f1

import os
import datetime
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (datetime.datetime.now().isoformat(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
<commit_before>import os
import time
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (time.asctime(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
<commit_msg>Use isoformat in datetime logs, rather than asctime

Change-Id: Ic11a70e28288517b6f174d7066f71a12efd5f4f1<commit_after>import os
import datetime
import pwd

from .utils import effective_user


class Tool(object):
    USER_NAME_PATTERN = 'tools.%s'

    class InvalidToolException(Exception):
        pass

    def __init__(self, name, username, uid, gid, home):
        self.name = name
        self.uid = uid
        self.gid = gid
        self.username = username
        self.home = home

    @classmethod
    def from_name(cls, name):
        """
        Create a Tool instance from a tool name
        """
        username = Tool.USER_NAME_PATTERN % (name, )
        try:
            user_info = pwd.getpwnam(username)
        except KeyError:
            # No such user was found
            raise Tool.InvalidToolException("No tool with name %s" % (name, ))
        if user_info.pw_uid < 50000:
            raise Tool.InvalidToolException("uid of tools should be < 50000, %s has uid %s" % (name, user_info.pw_uid))
        return cls(name, user_info.pw_name, user_info.pw_uid, user_info.pw_gid, user_info.pw_dir)

    def log(self, message):
        """
        Write to a log file in the tool's homedir
        """
        log_line = "%s %s" % (datetime.datetime.now().isoformat(), message)
        log_path = os.path.join(self.home, 'service.log')
        with effective_user(self.uid, self.gid):
            with open(log_path, 'a') as f:
                f.write(log_line + '\n')
8d0b9da511d55191609ffbd88a8b11afd6ff0367
remedy/radremedy.py
remedy/radremedy.py
#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from rad.models import db, Resource


def create_app(config, models=()):
    from remedyblueprint import remedy, url_for_other_page

    app = Flask(__name__)
    app.config.from_object(config)
    app.register_blueprint(remedy)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    app, manager = create_app('config.BaseConfig', (Resource, ))

    with app.app_context():
        manager.run()
#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.login import current_user
from rad.models import db, Resource


def create_app(config, models=()):
    app = Flask(__name__)
    app.config.from_object(config)

    from remedyblueprint import remedy, url_for_other_page
    app.register_blueprint(remedy)

    from auth.user_auth import auth, login_manager
    app.register_blueprint(auth)
    login_manager.init_app(app)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page
    app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous()

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    application, manager = create_app('config.BaseConfig', (Resource, ))

    with application.app_context():
        manager.run()
Move around imports and not shadow app
Move around imports and not shadow app
Python
mpl-2.0
radioprotector/radremedy,radioprotector/radremedy,radioprotector/radremedy,AllieDeford/radremedy,AllieDeford/radremedy,radremedy/radremedy,radremedy/radremedy,radremedy/radremedy,AllieDeford/radremedy,radioprotector/radremedy,radremedy/radremedy
#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from rad.models import db, Resource


def create_app(config, models=()):
    from remedyblueprint import remedy, url_for_other_page

    app = Flask(__name__)
    app.config.from_object(config)
    app.register_blueprint(remedy)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    app, manager = create_app('config.BaseConfig', (Resource, ))

    with app.app_context():
        manager.run()

Move around imports and not shadow app
#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.login import current_user
from rad.models import db, Resource


def create_app(config, models=()):
    app = Flask(__name__)
    app.config.from_object(config)

    from remedyblueprint import remedy, url_for_other_page
    app.register_blueprint(remedy)

    from auth.user_auth import auth, login_manager
    app.register_blueprint(auth)
    login_manager.init_app(app)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page
    app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous()

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    application, manager = create_app('config.BaseConfig', (Resource, ))

    with application.app_context():
        manager.run()
<commit_before>#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from rad.models import db, Resource


def create_app(config, models=()):
    from remedyblueprint import remedy, url_for_other_page

    app = Flask(__name__)
    app.config.from_object(config)
    app.register_blueprint(remedy)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    app, manager = create_app('config.BaseConfig', (Resource, ))

    with app.app_context():
        manager.run()
<commit_msg>Move around imports and not shadow app<commit_after>
#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.login import current_user
from rad.models import db, Resource


def create_app(config, models=()):
    app = Flask(__name__)
    app.config.from_object(config)

    from remedyblueprint import remedy, url_for_other_page
    app.register_blueprint(remedy)

    from auth.user_auth import auth, login_manager
    app.register_blueprint(auth)
    login_manager.init_app(app)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page
    app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous()

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    application, manager = create_app('config.BaseConfig', (Resource, ))

    with application.app_context():
        manager.run()
#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from rad.models import db, Resource


def create_app(config, models=()):
    from remedyblueprint import remedy, url_for_other_page

    app = Flask(__name__)
    app.config.from_object(config)
    app.register_blueprint(remedy)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    app, manager = create_app('config.BaseConfig', (Resource, ))

    with app.app_context():
        manager.run()

Move around imports and not shadow app

#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.login import current_user
from rad.models import db, Resource


def create_app(config, models=()):
    app = Flask(__name__)
    app.config.from_object(config)

    from remedyblueprint import remedy, url_for_other_page
    app.register_blueprint(remedy)

    from auth.user_auth import auth, login_manager
    app.register_blueprint(auth)
    login_manager.init_app(app)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page
    app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous()

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    application, manager = create_app('config.BaseConfig', (Resource, ))

    with application.app_context():
        manager.run()
<commit_before>#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from rad.models import db, Resource


def create_app(config, models=()):
    from remedyblueprint import remedy, url_for_other_page

    app = Flask(__name__)
    app.config.from_object(config)
    app.register_blueprint(remedy)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    app, manager = create_app('config.BaseConfig', (Resource, ))

    with app.app_context():
        manager.run()
<commit_msg>Move around imports and not shadow app<commit_after>#!/usr/bin/env python
"""
radremedy.py

Main web application file. Contains initial setup of database, API, and other components.
Also contains the setup of the routes.
"""
from flask import Flask, url_for, request, abort
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.login import current_user
from rad.models import db, Resource


def create_app(config, models=()):
    app = Flask(__name__)
    app.config.from_object(config)

    from remedyblueprint import remedy, url_for_other_page
    app.register_blueprint(remedy)

    from auth.user_auth import auth, login_manager
    app.register_blueprint(auth)
    login_manager.init_app(app)

    # searching configurations
    app.jinja_env.trim_blocks = True
    # Register the paging helper method with Jinja2
    app.jinja_env.globals['url_for_other_page'] = url_for_other_page
    app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous()

    db.init_app(app)
    Migrate(app, db, directory=app.config['MIGRATIONS_DIR'])

    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    # turning API off for now
    # from api_manager import init_api_manager
    # api_manager = init_api_manager(app, db)
    # map(lambda m: api_manager.create_api(m), models)

    return app, manager


if __name__ == '__main__':
    application, manager = create_app('config.BaseConfig', (Resource, ))

    with application.app_context():
        manager.run()
290280725db406dac9a185b08600bcb0fba3d2e3
src/gn.py
src/gn.py
'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir
'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os
import sys

import gn_helper


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir


def main():
    '''
    Get the latest directory created in the directory set as basepath
    in the config file, and then cd into it
    '''
    basepath = gn_helper.get_basepath()
    if basepath is not None:
        os.chdir(basepath)
        print 'Testing: in', os.getcwd()
    else:
        print 'Sorry, the basepath is not valid'


if __name__ == '__main__':
    main()
Add main method to test directory change
Add main method to test directory change
Python
bsd-2-clause
ambidextrousTx/GotoNewest
'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir

Add main method to test directory change
'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os
import sys

import gn_helper


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir


def main():
    '''
    Get the latest directory created in the directory set as basepath
    in the config file, and then cd into it
    '''
    basepath = gn_helper.get_basepath()
    if basepath is not None:
        os.chdir(basepath)
        print 'Testing: in', os.getcwd()
    else:
        print 'Sorry, the basepath is not valid'


if __name__ == '__main__':
    main()
<commit_before>'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir
<commit_msg>Add main method to test directory change<commit_after>
'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os
import sys

import gn_helper


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir


def main():
    '''
    Get the latest directory created in the directory set as basepath
    in the config file, and then cd into it
    '''
    basepath = gn_helper.get_basepath()
    if basepath is not None:
        os.chdir(basepath)
        print 'Testing: in', os.getcwd()
    else:
        print 'Sorry, the basepath is not valid'


if __name__ == '__main__':
    main()
'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir

Add main method to test directory change

'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os
import sys

import gn_helper


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir


def main():
    '''
    Get the latest directory created in the directory set as basepath
    in the config file, and then cd into it
    '''
    basepath = gn_helper.get_basepath()
    if basepath is not None:
        os.chdir(basepath)
        print 'Testing: in', os.getcwd()
    else:
        print 'Sorry, the basepath is not valid'


if __name__ == '__main__':
    main()
<commit_before>'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir
<commit_msg>Add main method to test directory change<commit_after>'''
GotoNewest

A tool to quickly transfer to the latest directory created in a base
directory, provided the name of the base directory is supplied as an
argument
'''
import os
import sys

import gn_helper


def transfer(basepath=None):
    '''
    Transfer to the newest directory in the basepath directory
    '''
    if basepath is None:
        raise AttributeError
    subdirs = os.listdir(basepath)
    if len(subdirs) == 0:
        raise AttributeError
    if len(subdirs) == 1:
        return subdirs.pop()

    latest_subdir = get_latest_modified_subdir(basepath)
    return latest_subdir


def get_latest_modified_subdir(basepath):
    '''
    Iterate through all the subdirectories in a given path and return
    the one with the latest modified time
    '''
    # TODO: There has to be a better way of doing this
    all_subdirs = os.listdir(basepath)
    latest = os.path.getmtime(basepath + '/' + all_subdirs[0])
    latest_subdir = all_subdirs[0]
    for subdir in all_subdirs:
        mtime = os.path.getmtime(basepath + '/' + subdir)
        if mtime > latest:
            latest = mtime
            latest_subdir = subdir

    return latest_subdir


def main():
    '''
    Get the latest directory created in the directory set as basepath
    in the config file, and then cd into it
    '''
    basepath = gn_helper.get_basepath()
    if basepath is not None:
        os.chdir(basepath)
        print 'Testing: in', os.getcwd()
    else:
        print 'Sorry, the basepath is not valid'


if __name__ == '__main__':
    main()
26a4dc79b6ef9b19b9c5f2394386980aa452dc8e
licensing/data/location.py
licensing/data/location.py
import httplib2
import json
import itertools
from collections import defaultdict
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run


class Locations(object):
    CLIENT_SECRETS_FILE = 'client_secrets.json'
    FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
        scope = "https://www.googleapis.com/auth/analytics.readonly",
        message = "Something seems to have gone wrong, check the client secrets file")

    def __init__(self):
        storage = Storage("tokens.dat")
        credentials = storage.get()
        if credentials is None or credentials.invalid:
            credentials = run(FLOW, storage)

        http = httplib2.Http()
        http = credentials.authorize(http)
        service = build("analytics","v3", http = http)

        query = service.data().ga().get(
            metrics = "ga:visits",
            dimensions = "ga:pagePath,ga:country",
            max_results = "5000",
            start_date = "2013-01-01",
            end_date = "2013-02-01",
            ids = "ga:63654109",
            filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$")

        response = query.execute()['rows']
        self.results = response
import httplib2
import json
import itertools
from collections import defaultdict
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run


class Locations(object):
    CLIENT_SECRETS_FILE = 'client_secrets.json'
    FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
        scope = "https://www.googleapis.com/auth/analytics.readonly",
        message = "Something seems to have gone wrong, check the client secrets file")

    def __init__(self):
        storage = Storage("tokens.dat")
        credentials = storage.get()
        if credentials is None or credentials.invalid:
            credentials = run(self.FLOW, storage)

        http = httplib2.Http()
        http = credentials.authorize(http)
        service = build("analytics","v3", http = http)

        query = service.data().ga().get(
            metrics = "ga:visits",
            dimensions = "ga:pagePath,ga:country",
            max_results = "5000",
            start_date = "2013-01-01",
            end_date = "2013-02-01",
            ids = "ga:63654109",
            filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$")

        response = query.execute()['rows']
        self.results = response
Fix referencing of oauth flow.
Fix referencing of oauth flow. @gtrogers @robyoung
Python
mit
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
import httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response Fix referencing of oauth flow. @gtrogers @robyoung
import httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(self.FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response
<commit_before>import httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response <commit_msg>Fix referencing of oauth flow. @gtrogers @robyoung<commit_after>
import httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(self.FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response
import httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response Fix referencing of oauth flow. @gtrogers @robyoungimport httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(self.FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response
<commit_before>import httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response <commit_msg>Fix referencing of oauth flow. @gtrogers @robyoung<commit_after>import httplib2 import json import itertools from collections import defaultdict from apiclient.discovery import build from oauth2client.file import Storage from oauth2client.client import flow_from_clientsecrets from oauth2client.tools import run class Locations(object): CLIENT_SECRETS_FILE = 'client_secrets.json' FLOW = flow_from_clientsecrets(CLIENT_SECRETS_FILE, scope = "https://www.googleapis.com/auth/analytics.readonly", message = "Something seems to have gone wrong, check the client secrets file") def __init__(self): storage = Storage("tokens.dat") credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(self.FLOW, storage) http = httplib2.Http() http = credentials.authorize(http) service = build("analytics","v3", http = http) query = service.data().ga().get( metrics = "ga:visits", dimensions = "ga:pagePath,ga:country", max_results = "5000", start_date = "2013-01-01", end_date = "2013-02-01", ids = "ga:63654109", filters = "ga:pagePath=~^/apply-for-a-licence/.*/form$") response = query.execute()['rows'] self.results = response
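(The fix in the row above hinges on a general Python scoping rule: class attributes are not in scope as bare names inside methods, so the original `run(FLOW, storage)` raises `NameError` at call time, while `run(self.FLOW, storage)` resolves the attribute through the instance and then the class. A minimal, self-contained illustration of the same rule, deliberately unrelated to the OAuth libraries in the row:

class Config(object):
    TIMEOUT = 30  # class attribute

    def broken(self):
        return TIMEOUT        # NameError: looked up as a local/global name

    def fixed(self):
        return self.TIMEOUT   # resolved via the instance, then the class

c = Config()
print(c.fixed())              # 30
try:
    c.broken()
except NameError as e:
    print(e)                  # name 'TIMEOUT' is not defined

`Locations.FLOW` would have worked equally well in the committed code; `self.FLOW` is simply the more common idiom inside instance methods.)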
608325c33cb2d446b89c263ba0bb02ced5c4ffe8
portal/views.py
portal/views.py
import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' writer = csv.writer(response) writer.writerow(['First row', 'Foo', 'Bar']) return response
import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' data = services.meterruns_export() writer = csv.DictWriter(response, fieldnames=data['headers']) writer.writeheader() for meter_run in data['meter_runs']: writer.writerow(meter_run) return response
Use the meterrun_export service to power csv export
Use the meterrun_export service to power csv export
Python
mit
impactlab/oeem-energy-datastore,impactlab/oeem-energy-datastore,impactlab/oeem-energy-datastore
import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' writer = csv.writer(response) writer.writerow(['First row', 'Foo', 'Bar']) return responseUse the meterrun_export service to power csv export
import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' data = services.meterruns_export() writer = csv.DictWriter(response, fieldnames=data['headers']) writer.writeheader() for meter_run in data['meter_runs']: writer.writerow(meter_run) return response
<commit_before>import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' writer = csv.writer(response) writer.writerow(['First row', 'Foo', 'Bar']) return response<commit_msg>Use the meterrun_export service to power csv export<commit_after>
import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' data = services.meterruns_export() writer = csv.DictWriter(response, fieldnames=data['headers']) writer.writeheader() for meter_run in data['meter_runs']: writer.writerow(meter_run) return response
import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' writer = csv.writer(response) writer.writerow(['First row', 'Foo', 'Bar']) return responseUse the meterrun_export service to power csv exportimport csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' data = services.meterruns_export() writer = csv.DictWriter(response, fieldnames=data['headers']) writer.writeheader() for meter_run in data['meter_runs']: writer.writerow(meter_run) return response
<commit_before>import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' writer = csv.writer(response) writer.writerow(['First row', 'Foo', 'Bar']) return response<commit_msg>Use the meterrun_export service to power csv export<commit_after>import csv from django.shortcuts import render from django.http import HttpResponse from . import services def index(request): data = services.overview() return render(request, 'index.html', data) def meter_runs(request): """Render the table of exported MeterRun results in html""" data = services.meterruns_export() return render(request, 'project_run_table.html', data) def csv_export(request): """Return a dump of all the MeterRuns in CSV form""" response = HttpResponse(content_type="text/csv") response["Content-Disposition"] = 'attachment; filename="meter_runs.csv"' data = services.meterruns_export() writer = csv.DictWriter(response, fieldnames=data['headers']) writer.writeheader() for meter_run in data['meter_runs']: writer.writerow(meter_run) return response
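(The portal commit above replaces a placeholder `csv.writer` with a `csv.DictWriter` driven by the `headers` and `meter_runs` keys of the service result. A minimal sketch of that pattern using hypothetical stand-in data, since `services.meterruns_export()` itself is not shown and its exact row shape is an assumption:

import csv
import io

data = {
    'headers': ['project', 'savings'],                      # assumed shape
    'meter_runs': [{'project': 'a', 'savings': 1.2},
                   {'project': 'b', 'savings': 0.7}],
}

buf = io.StringIO()                                         # stands in for the HTTP response
writer = csv.DictWriter(buf, fieldnames=data['headers'])
writer.writeheader()                                        # emits 'project,savings'
for row in data['meter_runs']:
    writer.writerow(row)                                    # maps dict keys onto fieldnames
print(buf.getvalue())

`DictWriter` raises `ValueError` if a row contains keys not listed in `fieldnames`, so this pattern quietly enforces that the export service and the header list stay in sync.)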
cca2bd0d4cfb14dbf85e4275fd9d064b9ffa08cc
urlgetters/yle_urls.py
urlgetters/yle_urls.py
import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data'] items = d['uutisetMostRecentNews']['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: return i += 1
import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data']['uutisetMostRecentNews'] items = d['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: break i += 1
Make script run and fix all typos
Make script run and fix all typos
Python
mit
HIIT/mediacollection
import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data'] items = d['uutisetMostRecentNews']['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: return i += 1 Make script run and fix all typos
import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data']['uutisetMostRecentNews'] items = d['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: break i += 1
<commit_before>import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data'] items = d['uutisetMostRecentNews']['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: return i += 1 <commit_msg>Make script run and fix all typos<commit_after>
import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data']['uutisetMostRecentNews'] items = d['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: break i += 1
import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data'] items = d['uutisetMostRecentNews']['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: return i += 1 Make script run and fix all typos import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data']['uutisetMostRecentNews'] items = d['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: break i += 1
<commit_before>import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data'] items = d['uutisetMostRecentNews']['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: return i += 1 <commit_msg>Make script run and fix all typos<commit_after> import requests url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:0,coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" i = 0 while True: url = "https://ua.api.yle.fi/graphql?app_id=8d7303fe&app_key=105875199ef3a1f7e0fbf7e2834b2dc&query={uutisetMostRecentNews:articleList(publisher:YLE_UUTISET,limit:100,offset:" + str( i * 100 ) + ",coverage:NATIONAL){meta{count,total,remaining},items{fullUrl,properties}}}" r = requests.get( url ) r = r.json() d = r['data']['uutisetMostRecentNews'] items = d['items'] for item in items: print item['fullUrl'] if d['meta']['remaining'] < 100: break i += 1
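(The yle_urls fix above turns on a small but hard Python rule: `return` is only legal inside a function, so the original module-level loop does not even compile (SyntaxError), while `break` exits the loop cleanly. A minimal paging loop in the same shape, written for Python 3 unlike the Python 2 `print` in the row, with a fake fetcher standing in for the Yle API; the URL, app_id, and app_key above are dataset content and not something to reuse:

def fake_fetch(offset):
    # Hypothetical stand-in for requests.get(url).json(); serves 200 items
    # in pages of 100 and reports how many remain after this page.
    total = 200
    items = [{'fullUrl': 'item-%d' % n}
             for n in range(offset, min(offset + 100, total))]
    return {'items': items,
            'meta': {'remaining': max(total - offset - 100, 0)}}

i = 0
while True:
    d = fake_fetch(i * 100)
    for item in d['items']:
        print(item['fullUrl'])
    if d['meta']['remaining'] < 100:
        break   # 'return' here would be a SyntaxError at module level
    i += 1

The commit also narrows `d` to `r['data']['uutisetMostRecentNews']` once, so that both `items` and `meta` are read from the same sub-object instead of mixing levels as the broken version did.)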
54d67ce544e95ecb58a62062ffe50fcd95db6f09
sso/apps.py
sso/apps.py
from django.apps import AppConfig class SsoConfig(AppConfig): name = 'sso' github_client_id = '844189c44c56ff04e727' github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc'
import json import os.path from django.apps import AppConfig from fmproject import settings class SsoConfig(AppConfig): base_config = json.load( open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json')) ) name = 'sso' github_client_id = base_config['github']['client_id'] github_client_secret = base_config['github']['client_secret']
Load github config from external file
Load github config from external file
Python
mit
favoritemedium/sso-prototype,favoritemedium/sso-prototype
from django.apps import AppConfig class SsoConfig(AppConfig): name = 'sso' github_client_id = '844189c44c56ff04e727' github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc' Load github config from external file
import json import os.path from django.apps import AppConfig from fmproject import settings class SsoConfig(AppConfig): base_config = json.load( open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json')) ) name = 'sso' github_client_id = base_config['github']['client_id'] github_client_secret = base_config['github']['client_secret']
<commit_before>from django.apps import AppConfig class SsoConfig(AppConfig): name = 'sso' github_client_id = '844189c44c56ff04e727' github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc' <commit_msg>Load github config from external file<commit_after>
import json import os.path from django.apps import AppConfig from fmproject import settings class SsoConfig(AppConfig): base_config = json.load( open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json')) ) name = 'sso' github_client_id = base_config['github']['client_id'] github_client_secret = base_config['github']['client_secret']
from django.apps import AppConfig class SsoConfig(AppConfig): name = 'sso' github_client_id = '844189c44c56ff04e727' github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc' Load github config from external fileimport json import os.path from django.apps import AppConfig from fmproject import settings class SsoConfig(AppConfig): base_config = json.load( open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json')) ) name = 'sso' github_client_id = base_config['github']['client_id'] github_client_secret = base_config['github']['client_secret']
<commit_before>from django.apps import AppConfig class SsoConfig(AppConfig): name = 'sso' github_client_id = '844189c44c56ff04e727' github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc' <commit_msg>Load github config from external file<commit_after>import json import os.path from django.apps import AppConfig from fmproject import settings class SsoConfig(AppConfig): base_config = json.load( open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json')) ) name = 'sso' github_client_id = base_config['github']['client_id'] github_client_secret = base_config['github']['client_secret']
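(The sso commit above swaps hard-coded GitHub credentials for values read from a JSON file at Django app-config time. A minimal sketch of the same idea outside Django, with a hypothetical config path, since the real code builds its path from `settings.BASE_DIR`:

import json
import os.path

# Assumed location; the committed code joins BASE_DIR, 'fmproject', 'config.json'.
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'config.json')

with open(CONFIG_PATH) as fh:   # 'with' also closes the handle, unlike the bare open() above
    config = json.load(fh)

github_client_id = config['github']['client_id']
github_client_secret = config['github']['client_secret']

One design note on the committed version: because the `json.load(open(...))` runs in the class body of `SsoConfig`, the file is read once at import time and any malformed or missing config fails the whole Django startup, which is usually the desired behavior for required credentials.)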