commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0e5b3bdd05ba0621c25db4cb4117bd7b93a67725 | only_one.py | only_one.py | #!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
data = json.load(open('config.json'))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
| #!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
import os
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
configfile = "%s/config.json" % (os.path.dirname(os.path.realpath(__file__)))
data = json.load(open(configfile))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
| Set full static path for config.json file. Necessary for launching manually | Set full static path for config.json file. Necessary for launching manually
| Python | mit | viable-hartman/mage_scheduler,viable-hartman/mage_scheduler | #!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
data = json.load(open('config.json'))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
Set full static path for config.json file. Necessary for launching manually | #!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
import os
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
configfile = "%s/config.json" % (os.path.dirname(os.path.realpath(__file__)))
data = json.load(open(configfile))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
| <commit_before>#!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
data = json.load(open('config.json'))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
<commit_msg>Set full static path for config.json file. Necessary for launching manually<commit_after> | #!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
import os
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
configfile = "%s/config.json" % (os.path.dirname(os.path.realpath(__file__)))
data = json.load(open(configfile))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
| #!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
data = json.load(open('config.json'))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
Set full static path for config.json file. Necessary for launching manually#!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
import os
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
configfile = "%s/config.json" % (os.path.dirname(os.path.realpath(__file__)))
data = json.load(open(configfile))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
| <commit_before>#!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
data = json.load(open('config.json'))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
<commit_msg>Set full static path for config.json file. Necessary for launching manually<commit_after>#!/usr/bin/env python2.7
from __future__ import absolute_import
import redis
import json
import os
#POOL = redis.ConnectionPool(max_connections=4, host='localhost', db=5, port=6379)
#REDIS_CLIENT = redis.Redis(connection_pool=POOL)
configfile = "%s/config.json" % (os.path.dirname(os.path.realpath(__file__)))
data = json.load(open(configfile))
REDIS_CLIENT = redis.Redis(host=data['oohost'], db=data['oodb'], port=data['ooport'])
def only_one(function=None, key="", timeout=None):
"""Enforce only one celery task at a time."""
def _dec(run_func):
"""Decorator."""
def _caller(*args, **kwargs):
"""Caller."""
ret_value = None
have_lock = False
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
have_lock = lock.acquire(blocking=False)
if have_lock:
ret_value = run_func(*args, **kwargs)
finally:
if have_lock:
lock.release()
return ret_value
return _caller
return _dec(function) if function is not None else _dec
|
305892bc4e6c12fb24d42e16b35621ad90553c7c | testdoc/formatter.py | testdoc/formatter.py | """Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
| """Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('')
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
| Put a blank line before section headings. | Put a blank line before section headings. | Python | mit | testing-cabal/testdoc | """Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
Put a blank line before section headings. | """Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('')
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
| <commit_before>"""Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
<commit_msg>Put a blank line before section headings.<commit_after> | """Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('')
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
| """Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
Put a blank line before section headings."""Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('')
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
| <commit_before>"""Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
<commit_msg>Put a blank line before section headings.<commit_after>"""Formatters for creating documents.
A formatter is an object which accepts an output stream (usually a file or
standard output) and then provides a structured way for writing to that stream.
All formatters should provide 'title', 'section', 'subsection' and 'paragraph'
methods which write to the stream.
"""
class WikiFormatter(object):
"""Moin formatter."""
def __init__(self, stream):
self.stream = stream
def writeln(self, line):
self.stream.write('%s\n' % (line,))
def title(self, name):
self.writeln('= %s =\n' % (name,))
def section(self, name):
self.writeln('')
self.writeln('== %s ==\n' % (name,))
def subsection(self, name):
self.writeln('=== %s ===\n' % (name,))
def paragraph(self, text):
self.writeln('%s\n' % (text.strip(),))
|
bd29eb1eb1d265af33900d62e52430d62eef4144 | pylanguagetool/api.py | pylanguagetool/api.py | # -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["enabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
| # -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["disabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
| Fix name of parameter `disabledCategories` | Fix name of parameter `disabledCategories` | Python | mit | Findus23/pyLanguagetool | # -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["enabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
Fix name of parameter `disabledCategories` | # -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["disabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
| <commit_before># -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["enabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
<commit_msg>Fix name of parameter `disabledCategories`<commit_after> | # -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["disabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
| # -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["enabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
Fix name of parameter `disabledCategories`# -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["disabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
| <commit_before># -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["enabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
<commit_msg>Fix name of parameter `disabledCategories`<commit_after># -*- coding: utf-8 -*-
import requests
def get_languages(api_url):
r = requests.get(api_url + "languages")
return r.json()
def check(input_text, api_url, lang, mother_tongue=None, preferred_variants=None,
enabled_rules=None, disabled_rules=None,
enabled_categories=None, disabled_categories=None,
enabled_only=False, verbose=False,
**kwargs):
post_parameters = {
"text": input_text,
"language": lang,
}
if mother_tongue:
post_parameters["motherTongue"] = mother_tongue
if preferred_variants:
post_parameters["preferredVariants"] = preferred_variants
if enabled_rules:
post_parameters["enabledRules"] = enabled_rules
if disabled_rules:
post_parameters["disabledRules"] = disabled_rules
if enabled_categories:
post_parameters["enabledCategories"] = enabled_categories
if disabled_categories:
post_parameters["disabledCategories"] = disabled_categories
if enabled_only:
post_parameters["enabledOnly"] = True
r = requests.post(api_url + "check", data=post_parameters)
if r.status_code != 200:
raise ValueError(r.text)
if verbose:
print(post_parameters)
print(r.json())
return r.json()
|
29e6e77b03569d39e484b47efd3b8230f30ee195 | eduid_signup/db.py | eduid_signup/db.py | import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| Allow Mongo connections to Mongo Replicaset Cluster | Allow Mongo connections to Mongo Replicaset Cluster
| Python | bsd-3-clause | SUNET/eduid-signup,SUNET/eduid-signup,SUNET/eduid-signup | import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
Allow Mongo connections to Mongo Replicaset Cluster | import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| <commit_before>import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
<commit_msg>Allow Mongo connections to Mongo Replicaset Cluster<commit_after> | import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
Allow Mongo connections to Mongo Replicaset Clusterimport pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| <commit_before>import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
<commit_msg>Allow Mongo connections to Mongo Replicaset Cluster<commit_after>import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
|
51dcc5fddeb649ec582c435d6244ea4d2e4f8991 | zproject/jinja2/__init__.py | zproject/jinja2/__init__.py |
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
return env
|
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
env.filters['timesince'] = timesince
return env
| Add django timesince filter to jinja2 filters. | templates: Add django timesince filter to jinja2 filters.
| Python | apache-2.0 | eeshangarg/zulip,brainwane/zulip,rht/zulip,brainwane/zulip,rishig/zulip,brainwane/zulip,brainwane/zulip,eeshangarg/zulip,synicalsyntax/zulip,punchagan/zulip,eeshangarg/zulip,synicalsyntax/zulip,zulip/zulip,punchagan/zulip,andersk/zulip,kou/zulip,zulip/zulip,punchagan/zulip,kou/zulip,rishig/zulip,showell/zulip,punchagan/zulip,synicalsyntax/zulip,timabbott/zulip,timabbott/zulip,rishig/zulip,shubhamdhama/zulip,shubhamdhama/zulip,kou/zulip,hackerkid/zulip,eeshangarg/zulip,rishig/zulip,rht/zulip,hackerkid/zulip,hackerkid/zulip,andersk/zulip,tommyip/zulip,kou/zulip,synicalsyntax/zulip,rht/zulip,punchagan/zulip,rishig/zulip,tommyip/zulip,hackerkid/zulip,showell/zulip,shubhamdhama/zulip,tommyip/zulip,showell/zulip,synicalsyntax/zulip,kou/zulip,shubhamdhama/zulip,andersk/zulip,punchagan/zulip,timabbott/zulip,shubhamdhama/zulip,rht/zulip,timabbott/zulip,rishig/zulip,timabbott/zulip,andersk/zulip,shubhamdhama/zulip,andersk/zulip,punchagan/zulip,zulip/zulip,zulip/zulip,andersk/zulip,tommyip/zulip,zulip/zulip,rht/zulip,brainwane/zulip,timabbott/zulip,showell/zulip,brainwane/zulip,shubhamdhama/zulip,tommyip/zulip,zulip/zulip,rishig/zulip,eeshangarg/zulip,hackerkid/zulip,kou/zulip,rht/zulip,hackerkid/zulip,synicalsyntax/zulip,showell/zulip,zulip/zulip,synicalsyntax/zulip,eeshangarg/zulip,kou/zulip,showell/zulip,eeshangarg/zulip,tommyip/zulip,rht/zulip,showell/zulip,timabbott/zulip,andersk/zulip,hackerkid/zulip,tommyip/zulip,brainwane/zulip |
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
return env
templates: Add django timesince filter to jinja2 filters. |
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
env.filters['timesince'] = timesince
return env
| <commit_before>
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
return env
<commit_msg>templates: Add django timesince filter to jinja2 filters.<commit_after> |
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
env.filters['timesince'] = timesince
return env
|
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
return env
templates: Add django timesince filter to jinja2 filters.
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
env.filters['timesince'] = timesince
return env
| <commit_before>
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
return env
<commit_msg>templates: Add django timesince filter to jinja2 filters.<commit_after>
from typing import Any
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import slugify, pluralize
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from .compressors import minified_js
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'render_markdown_path': render_markdown_path,
'minified_js': minified_js,
})
env.install_gettext_translations(translation, True)
env.filters['slugify'] = slugify
env.filters['pluralize'] = pluralize
env.filters['display_list'] = display_list
env.filters['device_action'] = device_action
env.filters['timesince'] = timesince
return env
|
bd9f64c43567135903f2b980c26f9369ba0df649 | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.4.dev0'
| Update dsub version to 0.4.4.dev0 | Update dsub version to 0.4.4.dev0
PiperOrigin-RevId: 344150311
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
Update dsub version to 0.4.4.dev0
PiperOrigin-RevId: 344150311 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.4.dev0'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
<commit_msg>Update dsub version to 0.4.4.dev0
PiperOrigin-RevId: 344150311<commit_after> | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.4.dev0'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
Update dsub version to 0.4.4.dev0
PiperOrigin-RevId: 344150311# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.4.dev0'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
<commit_msg>Update dsub version to 0.4.4.dev0
PiperOrigin-RevId: 344150311<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.4.dev0'
|
56ad144e447afe961905c67221d961e84aa3e41e | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8.dev0'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8'
| Update dsub version to 0.3.8 | Update dsub version to 0.3.8
PiperOrigin-RevId: 313423046
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8.dev0'
Update dsub version to 0.3.8
PiperOrigin-RevId: 313423046 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8.dev0'
<commit_msg>Update dsub version to 0.3.8
PiperOrigin-RevId: 313423046<commit_after> | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8.dev0'
Update dsub version to 0.3.8
PiperOrigin-RevId: 313423046# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8.dev0'
<commit_msg>Update dsub version to 0.3.8
PiperOrigin-RevId: 313423046<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.8'
|
824b2de018ff41bc17c93c3e0e7135137438159c | crits/standards/forms.py | crits/standards/forms.py | from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.ChoiceField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
| from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.CharField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
| Fix error in field type. | Fix error in field type.
Should be a Char field, not a choice fie.d
| Python | mit | davidhdz/crits,ckane/crits,ckane/crits,kaoscoach/crits,lakiw/cripts,blaquee/crits,ckane/crits,HardlyHaki/crits,korrosivesec/crits,dreardon/crits,0x3a/crits,Magicked/crits,cfossace/crits,davidhdz/crits,blaquee/crits,lakiw/cripts,seanthegeek/crits,jinverar/crits,lakiw/cripts,dreardon/crits,jhuapl-marti/marti,0x3a/crits,cdorer/crits,HardlyHaki/crits,kaoscoach/crits,DukeOfHazard/crits,lakiw/cripts,jhuapl-marti/marti,jinverar/crits,cdorer/crits,cdorer/crits,seanthegeek/crits,dreardon/crits,davidhdz/crits,kaoscoach/crits,korrosivesec/crits,ckane/crits,blaquee/crits,DukeOfHazard/crits,jhuapl-marti/marti,seanthegeek/crits,Magicked/crits,Magicked/crits,HardlyHaki/crits,Lambdanaut/crits,Magicked/crits,HardlyHaki/crits,cfossace/crits,jhuapl-marti/marti,DukeOfHazard/crits,davidhdz/crits,cfossace/crits,Lambdanaut/crits,korrosivesec/crits,0x3a/crits,seanthegeek/crits,kaoscoach/crits,jinverar/crits,jinverar/crits,0x3a/crits,dreardon/crits,DukeOfHazard/crits,korrosivesec/crits,blaquee/crits,davidhdz/crits,Lambdanaut/crits,seanthegeek/crits,Lambdanaut/crits,cfossace/crits,cdorer/crits | from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.ChoiceField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
Fix error in field type.
Should be a Char field, not a choice fie.d | from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.CharField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
| <commit_before>from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.ChoiceField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
<commit_msg>Fix error in field type.
Should be a Char field, not a choice fie.d<commit_after> | from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.CharField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
| from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.ChoiceField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
Fix error in field type.
Should be a Char field, not a choice fie.dfrom django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.CharField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
| <commit_before>from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.ChoiceField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
<commit_msg>Fix error in field type.
Should be a Char field, not a choice fie.d<commit_after>from django import forms
from crits.core.handlers import get_source_names
from crits.core.user_tools import get_user_organization
class UploadStandardsForm(forms.Form):
"""
Django form for uploading a standards document.
"""
error_css_class = 'error'
required_css_class = 'required'
filedata = forms.FileField()
source = forms.ChoiceField(required=True)
reference = forms.CharField(required=False)
make_event = forms.BooleanField(required=False, label="Create event", initial=True)
def __init__(self, username, *args, **kwargs):
super(UploadStandardsForm, self).__init__(*args, **kwargs)
self.fields['source'].choices = [(c.name,
c.name) for c in get_source_names(True,
True,
username)]
self.fields['source'].initial = get_user_organization(username)
|
446fe01898a89c4d6beea6ac85b2fbe642b87265 | cross_platform_codecs.py | cross_platform_codecs.py | import sublime
import sys
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
return str(decoded_line) + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command | import sublime
import sys
import re
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
decoded_line = re.sub(r'\033\[(\d{1,2}m|\d\w)', '', str(decoded_line))
return decoded_line + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command | Remove terminal Unicode Escape codes | Remove terminal Unicode Escape codes
| Python | mit | nickgzzjr/sublime-gulp,NicoSantangelo/sublime-gulp,nickgzzjr/sublime-gulp,NicoSantangelo/sublime-gulp | import sublime
import sys
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
return str(decoded_line) + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else commandRemove terminal Unicode Escape codes | import sublime
import sys
import re
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
decoded_line = re.sub(r'\033\[(\d{1,2}m|\d\w)', '', str(decoded_line))
return decoded_line + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command | <commit_before>import sublime
import sys
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
return str(decoded_line) + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command<commit_msg>Remove terminal Unicode Escape codes<commit_after> | import sublime
import sys
import re
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
decoded_line = re.sub(r'\033\[(\d{1,2}m|\d\w)', '', str(decoded_line))
return decoded_line + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command | import sublime
import sys
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
return str(decoded_line) + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else commandRemove terminal Unicode Escape codesimport sublime
import sys
import re
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
decoded_line = re.sub(r'\033\[(\d{1,2}m|\d\w)', '', str(decoded_line))
return decoded_line + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command | <commit_before>import sublime
import sys
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
return str(decoded_line) + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command<commit_msg>Remove terminal Unicode Escape codes<commit_after>import sublime
import sys
import re
class CrossPlaformCodecs():
@classmethod
def decode_line(self, line):
line = line.rstrip()
decoded_line = self.force_decode(line) if sys.version_info >= (3, 0) else line
decoded_line = re.sub(r'\033\[(\d{1,2}m|\d\w)', '', str(decoded_line))
return decoded_line + "\n"
@classmethod
def force_decode(self, text):
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
if sublime.platform() == "windows":
text = self.decode_windows_line(text)
return text
@classmethod
def decode_windows_line(self, text):
# Import only for Windows
import locale, subprocess
# STDERR gets the wrong encoding, use chcp to get the real one
proccess = subprocess.Popen(["chcp"], shell=True, stdout=subprocess.PIPE)
(chcp, _) = proccess.communicate()
# Decode using the locale preferred encoding (for example 'cp1251') and remove newlines
chcp = chcp.decode(locale.getpreferredencoding()).strip()
# Get the actual number
chcp = chcp.split(" ")[-1]
# Actually decode
return text.decode("cp" + chcp)
@classmethod
def encode_process_command(self, command):
is_sublime_2_and_in_windows = sublime.platform() == "windows" and int(sublime.version()) < 3000
return command.encode(sys.getfilesystemencoding()) if is_sublime_2_and_in_windows else command |
312c386cda37b8d39c82c30739cdc7162a5035c7 | util/versioncheck.py | util/versioncheck.py | #!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\.\w\.\w[\w\+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
| #!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
| Fix regex to support Mininet 20.30.40+++ | Fix regex to support Mininet 20.30.40+++
| Python | bsd-3-clause | mininet/mininet,mininet/mininet,mininet/mininet | #!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\.\w\.\w[\w\+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
Fix regex to support Mininet 20.30.40+++ | #!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
| <commit_before>#!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\.\w\.\w[\w\+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
<commit_msg>Fix regex to support Mininet 20.30.40+++<commit_after> | #!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
| #!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\.\w\.\w[\w\+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
Fix regex to support Mininet 20.30.40+++#!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
| <commit_before>#!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\.\w\.\w[\w\+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
<commit_msg>Fix regex to support Mininet 20.30.40+++<commit_after>#!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "grep -or 'Mininet \w\+\.\w\+\.\w\+[+]*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
|
d2967110a2025c255dd496b85313c5c948b4150a | debuild/models/package.py | debuild/models/package.py | # Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return db.reports.find({"package": self._record['_id']})
| # Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
from debuild.models.report import Report
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return (Report(x) for x in
db.reports.find({"package": self._record['_id']}))
| Move to a generator thinger. | Move to a generator thinger.
| Python | mit | opencollab/debile-web,opencollab/debile-web,paultag/debuild.me,opencollab/debile-web,paultag/debuild.me | # Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return db.reports.find({"package": self._record['_id']})
Move to a generator thinger. | # Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
from debuild.models.report import Report
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return (Report(x) for x in
db.reports.find({"package": self._record['_id']}))
| <commit_before># Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return db.reports.find({"package": self._record['_id']})
<commit_msg>Move to a generator thinger.<commit_after> | # Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
from debuild.models.report import Report
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return (Report(x) for x in
db.reports.find({"package": self._record['_id']}))
| # Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return db.reports.find({"package": self._record['_id']})
Move to a generator thinger.# Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
from debuild.models.report import Report
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return (Report(x) for x in
db.reports.find({"package": self._record['_id']}))
| <commit_before># Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return db.reports.find({"package": self._record['_id']})
<commit_msg>Move to a generator thinger.<commit_after># Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debuild.models import DebuildDatabaseObject, db, strtype
from debuild.models.report import Report
class Package(DebuildDatabaseObject):
_table = "packages"
def __init__(self, thing):
if isinstance(thing, strtype):
self._record = self._fetch_by_id(thing)
if self._record is None:
raise Exception
else:
self._record = thing
def reports(self):
return (Report(x) for x in
db.reports.find({"package": self._record['_id']}))
|
788a8a203225756bd16084f32090bf825453dfa8 | jupyter_nbconvert_config.py | jupyter_nbconvert_config.py | from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [os.path.join(jupyter_data_dir(), 'templates') ]
| from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [ '.', os.path.join(jupyter_data_dir(), 'templates') ]
| Add local path to exporter template path | Add local path to exporter template path
| Python | bsd-3-clause | benvarkey/IPython-notebook-extensions,jbn/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,jbn/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,jbn/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,jbn/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions | from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [os.path.join(jupyter_data_dir(), 'templates') ]
Add local path to exporter template path | from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [ '.', os.path.join(jupyter_data_dir(), 'templates') ]
| <commit_before>from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [os.path.join(jupyter_data_dir(), 'templates') ]
<commit_msg>Add local path to exporter template path<commit_after> | from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [ '.', os.path.join(jupyter_data_dir(), 'templates') ]
| from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [os.path.join(jupyter_data_dir(), 'templates') ]
Add local path to exporter template pathfrom jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [ '.', os.path.join(jupyter_data_dir(), 'templates') ]
| <commit_before>from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [os.path.join(jupyter_data_dir(), 'templates') ]
<commit_msg>Add local path to exporter template path<commit_after>from jupyter_core.paths import jupyter_config_dir, jupyter_data_dir
import os
import sys
sys.path.append(os.path.join(jupyter_data_dir(), 'extensions'))
c = get_config()
c.Exporter.template_path = [ '.', os.path.join(jupyter_data_dir(), 'templates') ]
|
b91a7df56119c14c0d38f8c3654453d7e8e317d2 | elm_format.py | elm_format.py | from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
| from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + str(output.strip()), 'errors: ' + str(errors.strip()))
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
| Fix decoding bug on log | Fix decoding bug on log | Python | mit | deadfoxygrandpa/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,deadfoxygrandpa/Elm.tmLanguage | from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
Fix decoding bug on log | from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + str(output.strip()), 'errors: ' + str(errors.strip()))
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
| <commit_before>from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
<commit_msg>Fix decoding bug on log<commit_after> | from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + str(output.strip()), 'errors: ' + str(errors.strip()))
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
| from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
Fix decoding bug on logfrom __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + str(output.strip()), 'errors: ' + str(errors.strip()))
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
| <commit_before>from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, sterr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + output.strip(), 'errors: ' + errors.strip())
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
<commit_msg>Fix decoding bug on log<commit_after>from __future__ import print_function
import subprocess
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
command = "elm-format {} --yes".format(self.view.file_name())
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + str(output.strip()), 'errors: ' + str(errors.strip()))
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
e1fc18a3b342023e4bb05dc2ffc77a5fc8cc9969 | scipy/spatial/setupscons.py | scipy/spatial/setupscons.py | #!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
| #!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
| Update setup.py file for numscons build. | Update setup.py file for numscons build.
| Python | bsd-3-clause | larsmans/scipy,zerothi/scipy,aman-iitj/scipy,rgommers/scipy,niknow/scipy,gertingold/scipy,sonnyhu/scipy,WillieMaddox/scipy,giorgiop/scipy,sriki18/scipy,vigna/scipy,bkendzior/scipy,sargas/scipy,gef756/scipy,Kamp9/scipy,FRidh/scipy,njwilson23/scipy,mortada/scipy,FRidh/scipy,maciejkula/scipy,aman-iitj/scipy,vanpact/scipy,jsilter/scipy,scipy/scipy,rgommers/scipy,juliantaylor/scipy,futurulus/scipy,matthew-brett/scipy,felipebetancur/scipy,lhilt/scipy,larsmans/scipy,nvoron23/scipy,vanpact/scipy,maniteja123/scipy,Kamp9/scipy,jjhelmus/scipy,anielsen001/scipy,pyramania/scipy,niknow/scipy,argriffing/scipy,matthewalbani/scipy,surhudm/scipy,pnedunuri/scipy,kalvdans/scipy,aarchiba/scipy,petebachant/scipy,Eric89GXL/scipy,pyramania/scipy,richardotis/scipy,pschella/scipy,sargas/scipy,nonhermitian/scipy,Eric89GXL/scipy,behzadnouri/scipy,Eric89GXL/scipy,Srisai85/scipy,gdooper/scipy,Stefan-Endres/scipy,raoulbq/scipy,aarchiba/scipy,raoulbq/scipy,juliantaylor/scipy,ilayn/scipy,jseabold/scipy,sonnyhu/scipy,bkendzior/scipy,fernand/scipy,dch312/scipy,mingwpy/scipy,perimosocordiae/scipy,mortonjt/scipy,juliantaylor/scipy,kalvdans/scipy,vhaasteren/scipy,piyush0609/scipy,mingwpy/scipy,FRidh/scipy,maniteja123/scipy,kleskjr/scipy,Newman101/scipy,bkendzior/scipy,lukauskas/scipy,mortada/scipy,Eric89GXL/scipy,teoliphant/scipy,argriffing/scipy,pizzathief/scipy,mdhaber/scipy,cpaulik/scipy,mortonjt/scipy,gertingold/scipy,anielsen001/scipy,sriki18/scipy,efiring/scipy,mingwpy/scipy,perimosocordiae/scipy,vberaudi/scipy,mortonjt/scipy,matthewalbani/scipy,ilayn/scipy,vberaudi/scipy,newemailjdm/scipy,andim/scipy,josephcslater/scipy,Shaswat27/scipy,ogrisel/scipy,pyramania/scipy,WillieMaddox/scipy,vanpact/scipy,lukauskas/scipy,ChanderG/scipy,ortylp/scipy,tylerjereddy/scipy,person142/scipy,gef756/scipy,fernand/scipy,pschella/scipy,e-q/scipy,jsilter/scipy,nonhermitian/scipy,pnedunuri/scipy,behzadnouri/scipy,jakevdp/scipy,lukauskas/scipy,efiring/scipy,cpaulik/scipy,juliantaylor/scipy,aro
kem/scipy,mdhaber/scipy,andyfaff/scipy,vberaudi/scipy,aeklant/scipy,Stefan-Endres/scipy,fredrikw/scipy,kleskjr/scipy,trankmichael/scipy,aeklant/scipy,matthew-brett/scipy,vanpact/scipy,larsmans/scipy,gfyoung/scipy,Shaswat27/scipy,WillieMaddox/scipy,mortada/scipy,scipy/scipy,Dapid/scipy,ndchorley/scipy,Dapid/scipy,raoulbq/scipy,petebachant/scipy,aarchiba/scipy,sargas/scipy,ChanderG/scipy,anntzer/scipy,hainm/scipy,endolith/scipy,jamestwebber/scipy,mikebenfield/scipy,andim/scipy,richardotis/scipy,endolith/scipy,mtrbean/scipy,mortada/scipy,endolith/scipy,tylerjereddy/scipy,bkendzior/scipy,giorgiop/scipy,ortylp/scipy,pbrod/scipy,behzadnouri/scipy,piyush0609/scipy,mingwpy/scipy,mhogg/scipy,aeklant/scipy,larsmans/scipy,richardotis/scipy,ortylp/scipy,trankmichael/scipy,felipebetancur/scipy,pbrod/scipy,richardotis/scipy,gef756/scipy,petebachant/scipy,njwilson23/scipy,minhlongdo/scipy,mhogg/scipy,apbard/scipy,rmcgibbo/scipy,behzadnouri/scipy,kleskjr/scipy,pschella/scipy,matthew-brett/scipy,giorgiop/scipy,hainm/scipy,ales-erjavec/scipy,fernand/scipy,ndchorley/scipy,sriki18/scipy,scipy/scipy,WarrenWeckesser/scipy,vigna/scipy,jonycgn/scipy,vhaasteren/scipy,jonycgn/scipy,petebachant/scipy,ortylp/scipy,person142/scipy,andyfaff/scipy,ales-erjavec/scipy,grlee77/scipy,e-q/scipy,mdhaber/scipy,person142/scipy,witcxc/scipy,trankmichael/scipy,chatcannon/scipy,maniteja123/scipy,lhilt/scipy,vigna/scipy,jseabold/scipy,ales-erjavec/scipy,endolith/scipy,nmayorov/scipy,fredrikw/scipy,Kamp9/scipy,mgaitan/scipy,zerothi/scipy,pschella/scipy,teoliphant/scipy,lukauskas/scipy,fredrikw/scipy,mhogg/scipy,sauliusl/scipy,Gillu13/scipy,dominicelse/scipy,Newman101/scipy,jakevdp/scipy,grlee77/scipy,dominicelse/scipy,teoliphant/scipy,jsilter/scipy,pyramania/scipy,perimosocordiae/scipy,grlee77/scipy,woodscn/scipy,jjhelmus/scipy,minhlongdo/scipy,zaxliu/scipy,nvoron23/scipy,ChanderG/scipy,tylerjereddy/scipy,haudren/scipy,mortonjt/scipy,richardotis/scipy,gertingold/scipy,rgommers/scipy,richardotis/scipy,mikebenf
ield/scipy,endolith/scipy,FRidh/scipy,sargas/scipy,FRidh/scipy,mortada/scipy,cpaulik/scipy,zxsted/scipy,jamestwebber/scipy,sonnyhu/scipy,witcxc/scipy,mingwpy/scipy,chatcannon/scipy,teoliphant/scipy,sauliusl/scipy,Stefan-Endres/scipy,teoliphant/scipy,ales-erjavec/scipy,mtrbean/scipy,mortada/scipy,befelix/scipy,rmcgibbo/scipy,ogrisel/scipy,Dapid/scipy,lhilt/scipy,andim/scipy,apbard/scipy,maniteja123/scipy,Newman101/scipy,lukauskas/scipy,zaxliu/scipy,grlee77/scipy,andim/scipy,newemailjdm/scipy,vhaasteren/scipy,argriffing/scipy,dch312/scipy,kleskjr/scipy,raoulbq/scipy,raoulbq/scipy,pnedunuri/scipy,jsilter/scipy,jamestwebber/scipy,Stefan-Endres/scipy,Shaswat27/scipy,jonycgn/scipy,Dapid/scipy,zxsted/scipy,lhilt/scipy,scipy/scipy,andim/scipy,Srisai85/scipy,mdhaber/scipy,felipebetancur/scipy,rmcgibbo/scipy,WarrenWeckesser/scipy,jjhelmus/scipy,mtrbean/scipy,apbard/scipy,gdooper/scipy,ogrisel/scipy,gdooper/scipy,jonycgn/scipy,jamestwebber/scipy,WarrenWeckesser/scipy,vberaudi/scipy,anielsen001/scipy,futurulus/scipy,chatcannon/scipy,josephcslater/scipy,haudren/scipy,zxsted/scipy,hainm/scipy,zaxliu/scipy,Stefan-Endres/scipy,surhudm/scipy,woodscn/scipy,pyramania/scipy,vanpact/scipy,ortylp/scipy,anntzer/scipy,newemailjdm/scipy,jor-/scipy,andyfaff/scipy,piyush0609/scipy,jseabold/scipy,haudren/scipy,mortonjt/scipy,ogrisel/scipy,surhudm/scipy,sauliusl/scipy,efiring/scipy,trankmichael/scipy,e-q/scipy,Shaswat27/scipy,Gillu13/scipy,njwilson23/scipy,jamestwebber/scipy,gfyoung/scipy,ilayn/scipy,vhaasteren/scipy,scipy/scipy,rmcgibbo/scipy,zaxliu/scipy,matthew-brett/scipy,matthew-brett/scipy,woodscn/scipy,fernand/scipy,cpaulik/scipy,sriki18/scipy,jonycgn/scipy,nonhermitian/scipy,andyfaff/scipy,kalvdans/scipy,perimosocordiae/scipy,tylerjereddy/scipy,Eric89GXL/scipy,Gillu13/scipy,mikebenfield/scipy,jor-/scipy,Srisai85/scipy,fredrikw/scipy,ortylp/scipy,arokem/scipy,chatcannon/scipy,pizzathief/scipy,person142/scipy,maniteja123/scipy,ndchorley/scipy,jseabold/scipy,Newman101/scipy,dominicelse/sci
py,befelix/scipy,WarrenWeckesser/scipy,Dapid/scipy,mikebenfield/scipy,anielsen001/scipy,pizzathief/scipy,fernand/scipy,mhogg/scipy,vanpact/scipy,jakevdp/scipy,ales-erjavec/scipy,haudren/scipy,gdooper/scipy,sonnyhu/scipy,pnedunuri/scipy,WarrenWeckesser/scipy,argriffing/scipy,Gillu13/scipy,jor-/scipy,pbrod/scipy,pizzathief/scipy,pbrod/scipy,woodscn/scipy,pbrod/scipy,trankmichael/scipy,zxsted/scipy,mdhaber/scipy,mgaitan/scipy,e-q/scipy,anielsen001/scipy,surhudm/scipy,aman-iitj/scipy,andim/scipy,anntzer/scipy,gfyoung/scipy,tylerjereddy/scipy,njwilson23/scipy,anielsen001/scipy,mgaitan/scipy,rmcgibbo/scipy,sonnyhu/scipy,apbard/scipy,WillieMaddox/scipy,efiring/scipy,aman-iitj/scipy,sargas/scipy,gef756/scipy,sriki18/scipy,andyfaff/scipy,witcxc/scipy,raoulbq/scipy,mhogg/scipy,cpaulik/scipy,jonycgn/scipy,arokem/scipy,newemailjdm/scipy,zerothi/scipy,ndchorley/scipy,vhaasteren/scipy,Newman101/scipy,gfyoung/scipy,Srisai85/scipy,minhlongdo/scipy,nvoron23/scipy,witcxc/scipy,larsmans/scipy,mingwpy/scipy,chatcannon/scipy,josephcslater/scipy,nvoron23/scipy,Newman101/scipy,matthewalbani/scipy,anntzer/scipy,nvoron23/scipy,niknow/scipy,mortonjt/scipy,lhilt/scipy,perimosocordiae/scipy,newemailjdm/scipy,maciejkula/scipy,dch312/scipy,mtrbean/scipy,matthewalbani/scipy,jjhelmus/scipy,mgaitan/scipy,WillieMaddox/scipy,petebachant/scipy,maciejkula/scipy,ales-erjavec/scipy,andyfaff/scipy,zerothi/scipy,matthewalbani/scipy,ilayn/scipy,rmcgibbo/scipy,apbard/scipy,vigna/scipy,fredrikw/scipy,njwilson23/scipy,Srisai85/scipy,Eric89GXL/scipy,woodscn/scipy,nonhermitian/scipy,ChanderG/scipy,futurulus/scipy,josephcslater/scipy,piyush0609/scipy,arokem/scipy,niknow/scipy,Shaswat27/scipy,person142/scipy,minhlongdo/scipy,piyush0609/scipy,chatcannon/scipy,dch312/scipy,nonhermitian/scipy,rgommers/scipy,aman-iitj/scipy,Srisai85/scipy,rgommers/scipy,witcxc/scipy,behzadnouri/scipy,WarrenWeckesser/scipy,sauliusl/scipy,futurulus/scipy,aarchiba/scipy,pnedunuri/scipy,kleskjr/scipy,surhudm/scipy,zerothi/scipy,aeklant/sc
ipy,woodscn/scipy,jakevdp/scipy,zxsted/scipy,Dapid/scipy,Kamp9/scipy,zaxliu/scipy,Gillu13/scipy,Stefan-Endres/scipy,fernand/scipy,arokem/scipy,mdhaber/scipy,zaxliu/scipy,jseabold/scipy,dch312/scipy,vigna/scipy,nmayorov/scipy,jseabold/scipy,pbrod/scipy,scipy/scipy,maciejkula/scipy,aeklant/scipy,Gillu13/scipy,felipebetancur/scipy,surhudm/scipy,kleskjr/scipy,vberaudi/scipy,hainm/scipy,ndchorley/scipy,bkendzior/scipy,befelix/scipy,anntzer/scipy,niknow/scipy,anntzer/scipy,nmayorov/scipy,aman-iitj/scipy,argriffing/scipy,niknow/scipy,felipebetancur/scipy,gef756/scipy,haudren/scipy,Kamp9/scipy,aarchiba/scipy,gertingold/scipy,jor-/scipy,ChanderG/scipy,ndchorley/scipy,haudren/scipy,sauliusl/scipy,hainm/scipy,fredrikw/scipy,piyush0609/scipy,jjhelmus/scipy,pschella/scipy,nvoron23/scipy,mtrbean/scipy,futurulus/scipy,gfyoung/scipy,mikebenfield/scipy,josephcslater/scipy,efiring/scipy,efiring/scipy,mhogg/scipy,behzadnouri/scipy,argriffing/scipy,sonnyhu/scipy,mtrbean/scipy,ilayn/scipy,newemailjdm/scipy,endolith/scipy,pizzathief/scipy,FRidh/scipy,juliantaylor/scipy,lukauskas/scipy,giorgiop/scipy,dominicelse/scipy,vberaudi/scipy,perimosocordiae/scipy,ogrisel/scipy,zerothi/scipy,maniteja123/scipy,vhaasteren/scipy,petebachant/scipy,WillieMaddox/scipy,befelix/scipy,futurulus/scipy,mgaitan/scipy,minhlongdo/scipy,sriki18/scipy,grlee77/scipy,ChanderG/scipy,zxsted/scipy,pnedunuri/scipy,giorgiop/scipy,Shaswat27/scipy,giorgiop/scipy,kalvdans/scipy,sauliusl/scipy,gdooper/scipy,gertingold/scipy,hainm/scipy,nmayorov/scipy,cpaulik/scipy,nmayorov/scipy,larsmans/scipy,trankmichael/scipy,e-q/scipy,jor-/scipy,mgaitan/scipy,kalvdans/scipy,jsilter/scipy,dominicelse/scipy,jakevdp/scipy,gef756/scipy,befelix/scipy,ilayn/scipy,felipebetancur/scipy,maciejkula/scipy,njwilson23/scipy,minhlongdo/scipy,Kamp9/scipy | #!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
Update setup.py file for numscons build. | #!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
| <commit_before>#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
<commit_msg>Update setup.py file for numscons build.<commit_after> | #!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
| #!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
Update setup.py file for numscons build.#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
| <commit_before>#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
<commit_msg>Update setup.py file for numscons build.<commit_after>#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
|
fb7766a4155a229f31af0e33d1b2aedc1d2ff380 | myip/views.py | myip/views.py | from flask import Blueprint, request, jsonify, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/ip', methods=['GET'])
def get_ip():
return jsonify(ip=request.remote_addr)
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html')
| from flask import Blueprint, request, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html', ip=request.remote_addr)
| Remove json route for retrieving the ip. | Remove json route for retrieving the ip.
| Python | mit | brotatos/myip,brotatos/myip | from flask import Blueprint, request, jsonify, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/ip', methods=['GET'])
def get_ip():
return jsonify(ip=request.remote_addr)
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html')
Remove json route for retrieving the ip. | from flask import Blueprint, request, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html', ip=request.remote_addr)
| <commit_before>from flask import Blueprint, request, jsonify, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/ip', methods=['GET'])
def get_ip():
return jsonify(ip=request.remote_addr)
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html')
<commit_msg>Remove json route for retrieving the ip.<commit_after> | from flask import Blueprint, request, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html', ip=request.remote_addr)
| from flask import Blueprint, request, jsonify, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/ip', methods=['GET'])
def get_ip():
return jsonify(ip=request.remote_addr)
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html')
Remove json route for retrieving the ip.from flask import Blueprint, request, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html', ip=request.remote_addr)
| <commit_before>from flask import Blueprint, request, jsonify, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/ip', methods=['GET'])
def get_ip():
return jsonify(ip=request.remote_addr)
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html')
<commit_msg>Remove json route for retrieving the ip.<commit_after>from flask import Blueprint, request, render_template
bp = Blueprint('views', __name__, url_prefix='')
@bp.route('/', methods=['GET'])
def index():
return render_template('index.html', ip=request.remote_addr)
|
273f9842bbe407e2e4548c712fed8c709c29dd0a | examples/cassandra_db.py | examples/cassandra_db.py | """Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import logger
logger.configure()
import guv
guv.monkey_patch()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
log.warn('Cassandra support is currently incomplete and buggy.')
c.shutdown()
if __name__ == '__main__':
main()
| """Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import guv
guv.monkey_patch()
import logger
logger.configure()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
c.shutdown()
if __name__ == '__main__':
main()
| Remove warning message (now fixed) | Remove warning message (now fixed)
| Python | mit | veegee/guv,veegee/guv | """Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import logger
logger.configure()
import guv
guv.monkey_patch()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
log.warn('Cassandra support is currently incomplete and buggy.')
c.shutdown()
if __name__ == '__main__':
main()
Remove warning message (now fixed) | """Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import guv
guv.monkey_patch()
import logger
logger.configure()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
c.shutdown()
if __name__ == '__main__':
main()
| <commit_before>"""Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import logger
logger.configure()
import guv
guv.monkey_patch()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
log.warn('Cassandra support is currently incomplete and buggy.')
c.shutdown()
if __name__ == '__main__':
main()
<commit_msg>Remove warning message (now fixed)<commit_after> | """Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import guv
guv.monkey_patch()
import logger
logger.configure()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
c.shutdown()
if __name__ == '__main__':
main()
| """Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import logger
logger.configure()
import guv
guv.monkey_patch()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
log.warn('Cassandra support is currently incomplete and buggy.')
c.shutdown()
if __name__ == '__main__':
main()
Remove warning message (now fixed)"""Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import guv
guv.monkey_patch()
import logger
logger.configure()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
c.shutdown()
if __name__ == '__main__':
main()
| <commit_before>"""Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import logger
logger.configure()
import guv
guv.monkey_patch()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
log.warn('Cassandra support is currently incomplete and buggy.')
c.shutdown()
if __name__ == '__main__':
main()
<commit_msg>Remove warning message (now fixed)<commit_after>"""Cassandra database example
This example demonstrates connecting to a Cassandra database and executing a query. Note that
using the database driver remains exactly the same. The only difference is that we're
monkey-patching everything (including the Cassandra driver), making it guv-friendly.
Adjust this example to your database address, keyspace, and query that you would like to run.
"""
import guv
guv.monkey_patch()
import logger
logger.configure()
import logging
from cassandra import cluster
log = logging.getLogger()
def main():
nodes = ['192.168.20.2']
c = cluster.Cluster(nodes, port=9042)
session = c.connect('test')
log.info('Execute commands')
rows = session.execute('SELECT * FROM numbers')
for row in rows:
log.info(row)
c.shutdown()
if __name__ == '__main__':
main()
|
8aab29ad2a9f4d4b89ca3e1e54894ccc7a9a6c68 | django_jinja/cache.py | django_jinja/cache.py | # -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
if django.VERSION[:2] < (1, 8):
from django.core.cache import get_cache
return get_cache(self._cache_name)
else:
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
| # -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
| Remove unnecessary Django < 1.8 check | Remove unnecessary Django < 1.8 check
Support for old Djangos was dropped in 4f9df0f7b764eda520b2f0428da798db02f66d97
| Python | bsd-3-clause | akx/django-jinja,niwinz/django-jinja,akx/django-jinja,akx/django-jinja,niwinz/django-jinja,akx/django-jinja,niwinz/django-jinja | # -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
if django.VERSION[:2] < (1, 8):
from django.core.cache import get_cache
return get_cache(self._cache_name)
else:
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
Remove unnecessary Django < 1.8 check
Support for old Djangos was dropped in 4f9df0f7b764eda520b2f0428da798db02f66d97 | # -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
| <commit_before># -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
if django.VERSION[:2] < (1, 8):
from django.core.cache import get_cache
return get_cache(self._cache_name)
else:
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
<commit_msg>Remove unnecessary Django < 1.8 check
Support for old Djangos was dropped in 4f9df0f7b764eda520b2f0428da798db02f66d97<commit_after> | # -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
| # -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
if django.VERSION[:2] < (1, 8):
from django.core.cache import get_cache
return get_cache(self._cache_name)
else:
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
Remove unnecessary Django < 1.8 check
Support for old Djangos was dropped in 4f9df0f7b764eda520b2f0428da798db02f66d97# -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
| <commit_before># -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
if django.VERSION[:2] < (1, 8):
from django.core.cache import get_cache
return get_cache(self._cache_name)
else:
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
<commit_msg>Remove unnecessary Django < 1.8 check
Support for old Djangos was dropped in 4f9df0f7b764eda520b2f0428da798db02f66d97<commit_after># -*- coding: utf-8 -*-
import django
from django.utils.functional import cached_property
from jinja2 import BytecodeCache as _BytecodeCache
class BytecodeCache(_BytecodeCache):
"""
A bytecode cache for Jinja2 that uses Django's caching framework.
"""
def __init__(self, cache_name):
self._cache_name = cache_name
@cached_property
def backend(self):
from django.core.cache import caches
return caches[self._cache_name]
def load_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
bytecode = self.backend.get(key)
if bytecode:
bucket.bytecode_from_string(bytecode)
def dump_bytecode(self, bucket):
key = 'jinja2_%s' % str(bucket.key)
self.backend.set(key, bucket.bytecode_to_string())
|
61381417ff796bbb90ab3b17c366b700e8de1f7b | conjureup/controllers/credentials/tui.py | conjureup/controllers/credentials/tui.py | from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'localhost':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
| from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'lxd':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
| Correct cloud type checker in TUI for localhost | Correct cloud type checker in TUI for localhost
Signed-off-by: Adam Stokes <49c255c1d074742f60d19fdba5e2aa5a34add567@users.noreply.github.com>
| Python | mit | ubuntu/conjure-up,conjure-up/conjure-up,conjure-up/conjure-up,ubuntu/conjure-up,Ubuntu-Solutions-Engineering/conjure,Ubuntu-Solutions-Engineering/conjure | from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'localhost':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
Correct cloud type checker in TUI for localhost
Signed-off-by: Adam Stokes <49c255c1d074742f60d19fdba5e2aa5a34add567@users.noreply.github.com> | from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'lxd':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
| <commit_before>from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'localhost':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
<commit_msg>Correct cloud type checker in TUI for localhost
Signed-off-by: Adam Stokes <49c255c1d074742f60d19fdba5e2aa5a34add567@users.noreply.github.com><commit_after> | from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'lxd':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
| from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'localhost':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
Correct cloud type checker in TUI for localhost
Signed-off-by: Adam Stokes <49c255c1d074742f60d19fdba5e2aa5a34add567@users.noreply.github.com>from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'lxd':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
| <commit_before>from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'localhost':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
<commit_msg>Correct cloud type checker in TUI for localhost
Signed-off-by: Adam Stokes <49c255c1d074742f60d19fdba5e2aa5a34add567@users.noreply.github.com><commit_after>from conjureup import events, utils
from conjureup.app_config import app
from . import common
class CredentialsController(common.BaseCredentialsController):
def render(self):
if app.provider.cloud_type == 'lxd':
# no credentials required for localhost
self.finish()
elif not self.credentials:
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
"found. If you wish to supply those "
"credentials please run "
"`juju add-credential "
"{}`.".format(app.provider.cloud))
events.Shutdown.set(1)
elif not app.provider.credential:
utils.warning("You attempted to install against a cloud with "
"multiple credentials and no default credentials "
"set. Please set a default credential with:\n"
"\n"
" juju set-default-credential {} <credential>")
events.Shutdown.set(1)
else:
self.finish()
_controller_class = CredentialsController
|
81a5997227cb9c8086b3cebf305e539eb2bf1990 | daas.py | daas.py | from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*12, dl_interval=3600*6, max_downloads=5, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
#print "Fetching new videos and consolidating queue..."
#yt.populate_queue()
#print "Downloading up to",max_downloads,"videos..."
#yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
| import os
from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*6, dl_interval=3600*3, max_downloads=4, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
print "Fetching new videos and consolidating queue..."
yt.populate_queue()
if len([file for file in os.listdir(vid_path) if not file.endswith('part') and not file.startswith('.')]) < 4:
print "Downloading up to",max_downloads,"videos..."
yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
| Stop getting videos every time script starts | Stop getting videos every time script starts
| Python | mit | BooDoo/death_extractor | from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*12, dl_interval=3600*6, max_downloads=5, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
#print "Fetching new videos and consolidating queue..."
#yt.populate_queue()
#print "Downloading up to",max_downloads,"videos..."
#yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
Stop getting videos every time script starts | import os
from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*6, dl_interval=3600*3, max_downloads=4, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
print "Fetching new videos and consolidating queue..."
yt.populate_queue()
if len([file for file in os.listdir(vid_path) if not file.endswith('part') and not file.startswith('.')]) < 4:
print "Downloading up to",max_downloads,"videos..."
yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
| <commit_before>from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*12, dl_interval=3600*6, max_downloads=5, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
#print "Fetching new videos and consolidating queue..."
#yt.populate_queue()
#print "Downloading up to",max_downloads,"videos..."
#yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
<commit_msg>Stop getting videos every time script starts<commit_after> | import os
from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*6, dl_interval=3600*3, max_downloads=4, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
print "Fetching new videos and consolidating queue..."
yt.populate_queue()
if len([file for file in os.listdir(vid_path) if not file.endswith('part') and not file.startswith('.')]) < 4:
print "Downloading up to",max_downloads,"videos..."
yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
| from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*12, dl_interval=3600*6, max_downloads=5, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
#print "Fetching new videos and consolidating queue..."
#yt.populate_queue()
#print "Downloading up to",max_downloads,"videos..."
#yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
Stop getting videos every time script startsimport os
from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*6, dl_interval=3600*3, max_downloads=4, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
print "Fetching new videos and consolidating queue..."
yt.populate_queue()
if len([file for file in os.listdir(vid_path) if not file.endswith('part') and not file.startswith('.')]) < 4:
print "Downloading up to",max_downloads,"videos..."
yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
| <commit_before>from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*12, dl_interval=3600*6, max_downloads=5, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
#print "Fetching new videos and consolidating queue..."
#yt.populate_queue()
#print "Downloading up to",max_downloads,"videos..."
#yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
<commit_msg>Stop getting videos every time script starts<commit_after>import os
from death_extractor import youtube as yt
from death_extractor import set_interval
from death_extractor import extract_and_upload
def death_as_a_service(vid_path = 'vids', post_interval=3600*2, search_interval=3600*6, dl_interval=3600*3, max_downloads=4, to_imgur=True, to_tumblr=True):
"""Run periodic search/download/extract_and_upload operations"""
print "Fetching new videos and consolidating queue..."
yt.populate_queue()
if len([file for file in os.listdir(vid_path) if not file.endswith('part') and not file.startswith('.')]) < 4:
print "Downloading up to",max_downloads,"videos..."
yt.dl(max_downloads)
extract_and_upload(vid_path, to_imgur, to_tumblr)
if search_interval:
search_timer = set_interval(search_interval, yt.populate_queue)
if dl_interval:
dl_timer = set_interval(dl_interval, yt.dl, max_downloads)
if post_interval:
post_timer = set_interval(post_interval, extract_and_upload, vid_path, to_imgur, to_tumblr)
if __name__ == '__main__':
print "Running from console..."
death_as_a_service()
|
31a15a6a1d698a9db403b106edb5023b8b1ad0b3 | collector/classes/service.py | collector/classes/service.py | import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
| # -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def attribute_exists(self, key):
return key in self.detailed_data
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
| Add a method to test whether an attribute exists | Add a method to test whether an attribute exists
| Python | mit | alphagov/backdrop-transactions-explorer-collector,alphagov/backdrop-transactions-explorer-collector | import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
Add a method to test whether an attribute exists | # -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def attribute_exists(self, key):
return key in self.detailed_data
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
| <commit_before>import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
<commit_msg>Add a method to test whether an attribute exists<commit_after> | # -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def attribute_exists(self, key):
return key in self.detailed_data
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
| import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
Add a method to test whether an attribute exists# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def attribute_exists(self, key):
return key in self.detailed_data
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
| <commit_before>import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def identifier(self):
"""Return a unique identifier for the service"""
# TODO: How do we uniquely identify a service?
service_title = self.service_title()
dept = self.abbreviated_department()
return sanitise_string(u'{0} {1} {2}'.format(self.numeric_id, dept, service_title))
def get(self, key):
return self.detailed_data[key]
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
<commit_msg>Add a method to test whether an attribute exists<commit_after># -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
    """Whitelist characters in a string.

    Keeps only ASCII letters, digits and spaces, then trims any
    leading/trailing whitespace from what remains.
    """
    allowed = set(' ' + string.ascii_letters + string.digits)
    kept = [char for char in messy_str if char in allowed]
    return u''.join(kept).strip()
class Service(object):
    """One transactional service plus its row of detailed data."""

    def __init__(self, numeric_id, detailed_data):
        self.numeric_id = numeric_id
        self.detailed_data = detailed_data

    def identifier(self):
        """Return a unique identifier for the service"""
        # TODO: How do we uniquely identify a service?
        title = self.service_title()
        dept = self.abbreviated_department()
        raw = u'{0} {1} {2}'.format(self.numeric_id, dept, title)
        return sanitise_string(raw)

    def attribute_exists(self, key):
        """Return True when *key* is present in the detailed data."""
        return key in self.detailed_data

    def get(self, key):
        """Look up *key* in the detailed data (KeyError when absent)."""
        return self.detailed_data[key]

    def service_title(self):
        return self.get('Name of service')

    def abbreviated_department(self):
        return self.get('Abbr')
|
cb97436aa76ffc65d4c6488ddac854eeca0dbd36 | fullcalendar/admin.py | fullcalendar/admin.py | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
fields = ('start_time', 'end_time', 'description')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| Reorder fields for occurence inline | Reorder fields for occurence inline
| Python | mit | jonge-democraten/mezzanine-fullcalendar | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
Reorder fields for occurence inline | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
fields = ('start_time', 'end_time', 'description')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| <commit_before>from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
<commit_msg>Reorder fields for occurence inline<commit_after> | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
fields = ('start_time', 'end_time', 'description')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
Reorder fields for occurence inlinefrom django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
fields = ('start_time', 'end_time', 'description')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| <commit_before>from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
model = Occurrence
extra = 1
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
<commit_msg>Reorder fields for occurence inline<commit_after>from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
    """Plain admin for event categories; the change list shows the name."""
    list_display = ('name',)
class OccurrenceInline(TabularDynamicInlineAdmin):
    """Inline table for editing an event's occurrences on the Event page."""
    model = Occurrence
    extra = 1  # show one blank occurrence row by default
    # Column order of the inline table.
    fields = ('start_time', 'end_time', 'description')
class EventAdmin(DisplayableAdmin):
    """Admin for events: filterable by category, searchable, with
    occurrence rows edited inline below the event fields."""
    list_display = ('title', 'event_category')
    list_filter = ('event_category',)
    search_fields = ('title', 'description', 'content', 'keywords')
    # Main event fields first, then Mezzanine's standard meta/SEO fields
    # in a group that is collapsed by default ("collapse-closed").
    fieldsets = (
        (None, {
            "fields": [
                "title", "status", ("publish_date", "expiry_date"),
                "event_category", "content"
            ]
        }),
        (_("Meta data"), {
            "fields": [
                "_meta_title", "slug",
                ("description", "gen_description"),
                "keywords", "in_sitemap"
            ],
            "classes": ("collapse-closed",)
        }),
    )
    inlines = [OccurrenceInline]
# Hook the admin classes into the default admin site.
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
|
9d05f18dcb4b52c1d4e68f53f24e5ccebab10a58 | bot/models.py | bot/models.py | from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
'ec2-54-227-250-80.compute-1.amazonaws.com'
':5432/d7do67r1b7t1nn', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
| from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
'@ec2-54-221-235-135.compute-1.amazonaws.com'
':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
| Change database url for create_engine() | Change database url for create_engine()
| Python | mit | alexbotello/BastionBot | from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
'ec2-54-227-250-80.compute-1.amazonaws.com'
':5432/d7do67r1b7t1nn', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
Change database url for create_engine() | from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
'@ec2-54-221-235-135.compute-1.amazonaws.com'
':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
| <commit_before>from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
'ec2-54-227-250-80.compute-1.amazonaws.com'
':5432/d7do67r1b7t1nn', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
<commit_msg>Change database url for create_engine()<commit_after> | from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
'@ec2-54-221-235-135.compute-1.amazonaws.com'
':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
| from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
'ec2-54-227-250-80.compute-1.amazonaws.com'
':5432/d7do67r1b7t1nn', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
Change database url for create_engine()from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
'@ec2-54-221-235-135.compute-1.amazonaws.com'
':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
| <commit_before>from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://avvcurseaphtxf:X0466JySVtLq6nyq_5pb7BQNjR@'
'ec2-54-227-250-80.compute-1.amazonaws.com'
':5432/d7do67r1b7t1nn', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
<commit_msg>Change database url for create_engine()<commit_after>from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
    """
    Performs database connection
    Returns sqlalchemy engine instance
    """
    # SECURITY NOTE(review): the Postgres credentials and host are
    # hard-coded in source; they belong in configuration/an environment
    # variable (e.g. DATABASE_URL) and should be rotated.
    return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
                         '@ec2-54-221-235-135.compute-1.amazonaws.com'
                         ':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
    """Create any tables declared on Base (currently Battletags) on *engine*."""
    Base.metadata.create_all(engine)
class Battletags(Base):
    """
    Table to store user battletags
    """
    __tablename__ = 'Battletags'
    # Discord username is the primary key, so one row per Discord user.
    disc_name = Column(String, primary_key=True)
    # Battle.net tag; unique, so a battletag can belong to only one user.
    battletag = Column(String, unique=True)
|
80e4caad24bceabd8e15133a96a6aaddd9a97c07 | code/type_null_true_false.py | code/type_null_true_false.py | def if_value(values):
print('"if value":')
for k, v in values:
print("%s - %s" % (k, 'true' if v else 'false'))
print()
def nil_value(values):
    """Report, for each (label, value) pair, whether the value is None."""
    print('"if value is None":')
    for label, value in values:
        verdict = 'true' if value is None else 'false'
        print("%s - %s" % (label, verdict))
    print()
def empty_value(values):
    """Report the truthiness of len(value) for each (label, value) pair.

    Values that have no length report the TypeError message instead.
    """
    print('"if len(value)":')
    for label, value in values:
        try:
            verdict = 'true' if len(value) else 'false'
        except TypeError as err:
            verdict = err
        print("%s - %s" % (label, verdict))
values = [
("'string'", 'string'),
("''", ''),
('[1, 2, 3]', [1, 2, 3]),
('[]', []),
('5', 5),
('0', 0),
(True, True),
(False, False),
(None, None),
]
if_value(values)
nil_value(values)
empty_value(values)
| def check(label, fn, values):
print(label)
for value in values:
try:
result = 'true' if fn(value) else 'false'
except TypeError as e:
result = 'error: %s' % e
print(" %-9r - %s" % (value, result))
print()
values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None]
check('if value:', lambda v: v, values)
check('if value is None:', lambda v: v is None, values)
check('if len(value):', lambda v: len(v), values)
| Refactor Null/True/False to look more pythonic | Refactor Null/True/False to look more pythonic
| Python | mit | evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,evmorov/lang-compare | def if_value(values):
print('"if value":')
for k, v in values:
print("%s - %s" % (k, 'true' if v else 'false'))
print()
def nil_value(values):
print('"if value is None":')
for k, v in values:
print("%s - %s" % (k, 'true' if v is None else 'false'))
print()
def empty_value(values):
print('"if len(value)":')
for k, v in values:
try:
print("%s - %s" % (k, 'true' if len(v) else 'false'))
except TypeError as e:
print("%s - %s" % (k, e))
values = [
("'string'", 'string'),
("''", ''),
('[1, 2, 3]', [1, 2, 3]),
('[]', []),
('5', 5),
('0', 0),
(True, True),
(False, False),
(None, None),
]
if_value(values)
nil_value(values)
empty_value(values)
Refactor Null/True/False to look more pythonic | def check(label, fn, values):
print(label)
for value in values:
try:
result = 'true' if fn(value) else 'false'
except TypeError as e:
result = 'error: %s' % e
print(" %-9r - %s" % (value, result))
print()
values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None]
check('if value:', lambda v: v, values)
check('if value is None:', lambda v: v is None, values)
check('if len(value):', lambda v: len(v), values)
| <commit_before>def if_value(values):
print('"if value":')
for k, v in values:
print("%s - %s" % (k, 'true' if v else 'false'))
print()
def nil_value(values):
print('"if value is None":')
for k, v in values:
print("%s - %s" % (k, 'true' if v is None else 'false'))
print()
def empty_value(values):
print('"if len(value)":')
for k, v in values:
try:
print("%s - %s" % (k, 'true' if len(v) else 'false'))
except TypeError as e:
print("%s - %s" % (k, e))
values = [
("'string'", 'string'),
("''", ''),
('[1, 2, 3]', [1, 2, 3]),
('[]', []),
('5', 5),
('0', 0),
(True, True),
(False, False),
(None, None),
]
if_value(values)
nil_value(values)
empty_value(values)
<commit_msg>Refactor Null/True/False to look more pythonic<commit_after> | def check(label, fn, values):
print(label)
for value in values:
try:
result = 'true' if fn(value) else 'false'
except TypeError as e:
result = 'error: %s' % e
print(" %-9r - %s" % (value, result))
print()
values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None]
check('if value:', lambda v: v, values)
check('if value is None:', lambda v: v is None, values)
check('if len(value):', lambda v: len(v), values)
| def if_value(values):
print('"if value":')
for k, v in values:
print("%s - %s" % (k, 'true' if v else 'false'))
print()
def nil_value(values):
print('"if value is None":')
for k, v in values:
print("%s - %s" % (k, 'true' if v is None else 'false'))
print()
def empty_value(values):
print('"if len(value)":')
for k, v in values:
try:
print("%s - %s" % (k, 'true' if len(v) else 'false'))
except TypeError as e:
print("%s - %s" % (k, e))
values = [
("'string'", 'string'),
("''", ''),
('[1, 2, 3]', [1, 2, 3]),
('[]', []),
('5', 5),
('0', 0),
(True, True),
(False, False),
(None, None),
]
if_value(values)
nil_value(values)
empty_value(values)
Refactor Null/True/False to look more pythonicdef check(label, fn, values):
print(label)
for value in values:
try:
result = 'true' if fn(value) else 'false'
except TypeError as e:
result = 'error: %s' % e
print(" %-9r - %s" % (value, result))
print()
values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None]
check('if value:', lambda v: v, values)
check('if value is None:', lambda v: v is None, values)
check('if len(value):', lambda v: len(v), values)
| <commit_before>def if_value(values):
print('"if value":')
for k, v in values:
print("%s - %s" % (k, 'true' if v else 'false'))
print()
def nil_value(values):
print('"if value is None":')
for k, v in values:
print("%s - %s" % (k, 'true' if v is None else 'false'))
print()
def empty_value(values):
print('"if len(value)":')
for k, v in values:
try:
print("%s - %s" % (k, 'true' if len(v) else 'false'))
except TypeError as e:
print("%s - %s" % (k, e))
values = [
("'string'", 'string'),
("''", ''),
('[1, 2, 3]', [1, 2, 3]),
('[]', []),
('5', 5),
('0', 0),
(True, True),
(False, False),
(None, None),
]
if_value(values)
nil_value(values)
empty_value(values)
<commit_msg>Refactor Null/True/False to look more pythonic<commit_after>def check(label, fn, values):
print(label)
for value in values:
try:
result = 'true' if fn(value) else 'false'
except TypeError as e:
result = 'error: %s' % e
print(" %-9r - %s" % (value, result))
print()
values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None]
check('if value:', lambda v: v, values)
check('if value is None:', lambda v: v is None, values)
check('if len(value):', lambda v: len(v), values)
|
927015af4717ffdb52f30f5b931dbba241e1c540 | pixiv_hack.py | pixiv_hack.py | from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
    # Interactively collect the crawl parameters from the user
    # (Python 2 script: raw_input returns the typed line as a str).
    print('Enter PHPSESSID:')
    PHPSESSID = raw_input()
    print('Enter keyword:')
    key_word = raw_input()
    print('Enter minimum ratings:')
    min_ratings = raw_input()
    print('Enter maximum number of illustrations to download:')
    max_pics = raw_input()
    print('Download manga? (Y/n)')
    download_manga_str = raw_input()
    if (download_manga_str == 'Y' or download_manga_str == 'y'):
        print('Will download manga.')
        download_manga = True
    else:
        print('Will not download manga.')
        download_manga = False
    lib = PixivHackLib()
    lib.session_id = PHPSESSID
    # BUG FIX: the keyword entered above was previously ignored in favour of
    # the hard-coded string 'kancolle'; pass the user's keyword instead.
    lib.config(key_word, int(min_ratings), int(max_pics), download_manga)
    lib.crawl()
| from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config(key_word, int(min_ratings), int(max_pics), download_manga)
lib.crawl()
| Fix bug: key_word not passed | Fix bug: key_word not passed
| Python | mit | Chion82/PixivHack | from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config('kancolle', int(min_ratings), int(max_pics), download_manga)
lib.crawl()
Fix bug: key_word not passed | from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config(key_word, int(min_ratings), int(max_pics), download_manga)
lib.crawl()
| <commit_before>from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config('kancolle', int(min_ratings), int(max_pics), download_manga)
lib.crawl()
<commit_msg>Fix bug: key_word not passed<commit_after> | from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config(key_word, int(min_ratings), int(max_pics), download_manga)
lib.crawl()
| from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config('kancolle', int(min_ratings), int(max_pics), download_manga)
lib.crawl()
Fix bug: key_word not passedfrom lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config(key_word, int(min_ratings), int(max_pics), download_manga)
lib.crawl()
| <commit_before>from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config('kancolle', int(min_ratings), int(max_pics), download_manga)
lib.crawl()
<commit_msg>Fix bug: key_word not passed<commit_after>from lib.cls_crawl import PixivHackLib
if __name__ == '__main__':
print('Enter PHPSESSID:')
PHPSESSID = raw_input()
print('Enter keyword:')
key_word = raw_input()
print('Enter minimum ratings:')
min_ratings = raw_input()
print('Enter maximum number of illustrations to download:')
max_pics = raw_input()
print('Download manga? (Y/n)')
download_manga_str = raw_input()
if (download_manga_str == 'Y' or download_manga_str == 'y'):
print('Will download manga.')
download_manga = True
else:
print('Will not download manga.')
download_manga = False
lib = PixivHackLib()
lib.session_id = PHPSESSID
lib.config(key_word, int(min_ratings), int(max_pics), download_manga)
lib.crawl()
|
645cbd9a4445898f69b1ecd9f3db7d5e7b7b9dbd | libqtile/layout/__init__.py | libqtile/layout/__init__.py | from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
| from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
| Add MonadTall to layout module proper. | Add MonadTall to layout module proper.
Fixes #126
| Python | mit | nxnfufunezn/qtile,qtile/qtile,de-vri-es/qtile,jdowner/qtile,tych0/qtile,w1ndy/qtile,soulchainer/qtile,rxcomm/qtile,flacjacket/qtile,w1ndy/qtile,ramnes/qtile,andrewyoung1991/qtile,frostidaho/qtile,himaaaatti/qtile,cortesi/qtile,encukou/qtile,himaaaatti/qtile,de-vri-es/qtile,kopchik/qtile,qtile/qtile,zordsdavini/qtile,StephenBarnes/qtile,flacjacket/qtile,kiniou/qtile,tych0/qtile,rxcomm/qtile,frostidaho/qtile,farebord/qtile,EndPointCorp/qtile,encukou/qtile,zordsdavini/qtile,jdowner/qtile,apinsard/qtile,aniruddhkanojia/qtile,xplv/qtile,ramnes/qtile,apinsard/qtile,kseistrup/qtile,cortesi/qtile,dequis/qtile,farebord/qtile,xplv/qtile,EndPointCorp/qtile,kopchik/qtile,nxnfufunezn/qtile,soulchainer/qtile,kseistrup/qtile,dequis/qtile,aniruddhkanojia/qtile,andrewyoung1991/qtile,StephenBarnes/qtile,kynikos/qtile,kiniou/qtile,kynikos/qtile | from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
Add MonadTall to layout module proper.
Fixes #126 | from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
| <commit_before>from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
<commit_msg>Add MonadTall to layout module proper.
Fixes #126<commit_after> | from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
| from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
Add MonadTall to layout module proper.
Fixes #126from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
| <commit_before>from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
<commit_msg>Add MonadTall to layout module proper.
Fixes #126<commit_after>from stack import Stack
from max import Max
from tile import Tile
from floating import Floating
from ratiotile import RatioTile
from tree import TreeTab
from slice import Slice
from xmonad import MonadTall
|
57c6c4108c949afb1e2ef682f980488a0d9610f4 | project_fish/whats_fresh/tests/test_preparation_model.py | project_fish/whats_fresh/tests/test_preparation_model.py | from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
| from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
    """Verify the ``Preparation`` model exposes exactly the expected fields."""

    def setUp(self):
        # Map of every expected field name to its Django field class.
        self.expected_fields = {
            'name': models.TextField,
            'description': models.TextField,
            'additional_info': models.TextField,
            'id': models.AutoField
        }

    def test_fields_exist(self):
        """Each expected field must exist and have the expected field type."""
        model = models.get_model('whats_fresh', 'Preparation')
        for field, field_type in self.expected_fields.items():
            self.assertEqual(
                field_type, type(model._meta.get_field_by_name(field)[0]))

    def test_no_additional_fields(self):
        """The model must not define any fields beyond the expected ones."""
        # BUG FIX: this previously inspected ``Vendor._meta`` (copy-paste from
        # the vendor model tests); a Preparation test must inspect Preparation.
        fields = Preparation._meta.get_all_field_names()
        self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
| Change unicode test string to ascii | Change unicode test string to ascii
| Python | apache-2.0 | osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api | from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
Change unicode test string to ascii | from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
| <commit_before>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
<commit_msg>Change unicode test string to ascii<commit_after> | from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
| from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
Change unicode test string to asciifrom django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
| <commit_before>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
<commit_msg>Change unicode test string to ascii<commit_after>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
e99697b18c7ec6052ed161467197b0e86ed3603d | nbgrader/preprocessors/execute.py | nbgrader/preprocessors/execute.py | from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List(["--HistoryManager.hist_file=:memory:"])
| from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from textwrap import dedent
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List([], config=True, help=dedent(
"""
A list of extra arguments to pass to the kernel. For python kernels,
this defaults to ``--HistoryManager.hist_file=:memory:``. For other
kernels this is just an empty list.
"""))
def preprocess(self, nb, resources):
kernel_name = nb.metadata.get('kernelspec', {}).get('name', 'python')
if self.extra_arguments == [] and kernel_name == "python":
self.extra_arguments = ["--HistoryManager.hist_file=:memory:"]
return super(Execute, self).preprocess(nb, resources)
| Change options so other kernels work with nbgrader | Change options so other kernels work with nbgrader
| Python | bsd-3-clause | ellisonbg/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,jhamrick/nbgrader | from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List(["--HistoryManager.hist_file=:memory:"])
Change options so other kernels work with nbgrader | from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from textwrap import dedent
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
    """Execute notebook cells, tolerating errors and timeouts.

    Extends nbconvert's ``ExecutePreprocessor`` so that execution keeps
    going past failing cells and a kernel-specific set of extra arguments
    can be supplied via config.
    """

    # Interrupt the kernel (rather than hanging) when a cell times out.
    interrupt_on_timeout = Bool(True)
    # Keep executing subsequent cells even if one raises an error.
    allow_errors = Bool(True)
    extra_arguments = List([], config=True, help=dedent(
        """
        A list of extra arguments to pass to the kernel. For python kernels,
        this defaults to ``--HistoryManager.hist_file=:memory:``. For other
        kernels this is just an empty list.
        """))

    def preprocess(self, nb, resources):
        """Execute ``nb`` and return the (notebook, resources) pair.

        If no explicit ``extra_arguments`` were configured and the
        notebook's kernelspec names a python kernel (the default when no
        kernelspec metadata is present), pass
        ``--HistoryManager.hist_file=:memory:`` so execution does not touch
        the on-disk IPython history database.
        """
        kernel_name = nb.metadata.get('kernelspec', {}).get('name', 'python')
        if self.extra_arguments == [] and kernel_name == "python":
            self.extra_arguments = ["--HistoryManager.hist_file=:memory:"]
        return super(Execute, self).preprocess(nb, resources)
| <commit_before>from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List(["--HistoryManager.hist_file=:memory:"])
<commit_msg>Change options so other kernels work with nbgrader<commit_after> | from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from textwrap import dedent
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List([], config=True, help=dedent(
"""
A list of extra arguments to pass to the kernel. For python kernels,
this defaults to ``--HistoryManager.hist_file=:memory:``. For other
kernels this is just an empty list.
"""))
def preprocess(self, nb, resources):
kernel_name = nb.metadata.get('kernelspec', {}).get('name', 'python')
if self.extra_arguments == [] and kernel_name == "python":
self.extra_arguments = ["--HistoryManager.hist_file=:memory:"]
return super(Execute, self).preprocess(nb, resources)
| from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List(["--HistoryManager.hist_file=:memory:"])
Change options so other kernels work with nbgraderfrom nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from textwrap import dedent
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List([], config=True, help=dedent(
"""
A list of extra arguments to pass to the kernel. For python kernels,
this defaults to ``--HistoryManager.hist_file=:memory:``. For other
kernels this is just an empty list.
"""))
def preprocess(self, nb, resources):
kernel_name = nb.metadata.get('kernelspec', {}).get('name', 'python')
if self.extra_arguments == [] and kernel_name == "python":
self.extra_arguments = ["--HistoryManager.hist_file=:memory:"]
return super(Execute, self).preprocess(nb, resources)
| <commit_before>from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List(["--HistoryManager.hist_file=:memory:"])
<commit_msg>Change options so other kernels work with nbgrader<commit_after>from nbconvert.preprocessors import ExecutePreprocessor
from traitlets import Bool, List
from textwrap import dedent
from . import NbGraderPreprocessor
class Execute(NbGraderPreprocessor, ExecutePreprocessor):
interrupt_on_timeout = Bool(True)
allow_errors = Bool(True)
extra_arguments = List([], config=True, help=dedent(
"""
A list of extra arguments to pass to the kernel. For python kernels,
this defaults to ``--HistoryManager.hist_file=:memory:``. For other
kernels this is just an empty list.
"""))
def preprocess(self, nb, resources):
kernel_name = nb.metadata.get('kernelspec', {}).get('name', 'python')
if self.extra_arguments == [] and kernel_name == "python":
self.extra_arguments = ["--HistoryManager.hist_file=:memory:"]
return super(Execute, self).preprocess(nb, resources)
|
9454bfa12e36cdab9bf803cf169c1d979bb27381 | cmus_notify/notifications.py | cmus_notify/notifications.py | """Contains code related to notifications."""
import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
| """Contains code related to notifications."""
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
    """Send a desktop notification describing the current song.

    :param arguments: The parsed arguments; ``'application_name'``,
        ``'title'`` and ``'body'`` are required, while ``'urgency'`` and
        ``'timeout'`` are optional and fall back to defaults
    :param information: The various song informations used to fill in the
        notification's title and body
    """
    # Imported here (not at module level) so that importing this module does
    # not require the notify2 package / a D-Bus session.
    import notify2
    notify2.init(arguments['application_name'])
    title, text = format_notification_message(information,
                                              title=arguments['title'],
                                              body=arguments['body'])
    notification = notify2.Notification(
        title,
        text,
        # NOTE(review): this looks up the literal key 'icon_path' in
        # ICONS_BY_STATUS, which (by its name) appears to be keyed by
        # playback status — that key likely never matches, so the default
        # icon is always used. Confirm whether this should be keyed on the
        # player status instead.
        ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
    )
    notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
    notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
    notification.show()
| Fix notify2 being imported with the module | Fix notify2 being imported with the module
| Python | mit | AntoineGagne/cmus-notify | """Contains code related to notifications."""
import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
Fix notify2 being imported with the module | """Contains code related to notifications."""
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
import notify2
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
| <commit_before>"""Contains code related to notifications."""
import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
<commit_msg>Fix notify2 being imported with the module<commit_after> | """Contains code related to notifications."""
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
import notify2
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
| """Contains code related to notifications."""
import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
Fix notify2 being imported with the module"""Contains code related to notifications."""
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
import notify2
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
| <commit_before>"""Contains code related to notifications."""
import notify2
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
<commit_msg>Fix notify2 being imported with the module<commit_after>"""Contains code related to notifications."""
from .constants import (DEFAULT_ICON_PATH,
DEFAULT_TIMEOUT,
ICONS_BY_STATUS)
from .formatters import format_notification_message
def send_notification(arguments, information):
"""Send the notification to the OS with a Python library.
:param arguments: The parsed arguments
:param information: The various song informations
"""
import notify2
notify2.init(arguments['application_name'])
title, text = format_notification_message(information,
title=arguments['title'],
body=arguments['body'])
notification = notify2.Notification(
title,
text,
ICONS_BY_STATUS.get('icon_path', DEFAULT_ICON_PATH)
)
notification.set_urgency(arguments.get('urgency', notify2.URGENCY_LOW))
notification.timeout = arguments.get('timeout', DEFAULT_TIMEOUT)
notification.show()
|
67733d8093980035ae4d212c8bf74fde9a59d983 | manoseimas/settings/testing.py | manoseimas/settings/testing.py | from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
| from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
# Database name should be taken from read_default_file, but that does not work
# with tests. Without database name running a single test gets 'test_' as
# database name.
DATABASES['default']['NAME'] = 'manoseimas'
| Fix annoying database issue when running tests | Fix annoying database issue when running tests
Now database name is taken from 'read_default_file' parameter which points to a
my.cnf configuration, where database name should be defined.
But this does not work when running tests for a single file, database name is
set to 'test_', but when running all tests, then database name is set to
'test_manoseimas'.
Setting database name in settings explicitly fixes this.
| Python | agpl-3.0 | ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt | from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
Fix annoying database issue when running tests
Now database name is taken from 'read_default_file' parameter which points to a
my.cnf configuration, where database name should be defined.
But this does not work when running tests for a single file, database name is
set to 'test_', but when running all tests, then database name is set to
'test_manoseimas'.
Setting database name in settings explicitly fixes this. | from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
# Database name should be taken from read_default_file, but that does not work
# with tests. Without database name running a single test gets 'test_' as
# database name.
DATABASES['default']['NAME'] = 'manoseimas'
| <commit_before>from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
<commit_msg>Fix annoying database issue when running tests
Now database name is taken from 'read_default_file' parameter which points to a
my.cnf configuration, where database name should be defined.
But this does not work when running tests for a single file, database name is
set to 'test_', but when running all tests, then database name is set to
'test_manoseimas'.
Setting database name in settings explicitly fixes this.<commit_after> | from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
# Database name should be taken from read_default_file, but that does not work
# with tests. Without database name running a single test gets 'test_' as
# database name.
DATABASES['default']['NAME'] = 'manoseimas'
| from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
Fix annoying database issue when running tests
Now database name is taken from 'read_default_file' parameter which points to a
my.cnf configuration, where database name should be defined.
But this does not work when running tests for a single file, database name is
set to 'test_', but when running all tests, then database name is set to
'test_manoseimas'.
Setting database name in settings explicitly fixes this.from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
# Database name should be taken from read_default_file, but that does not work
# with tests. Without database name running a single test gets 'test_' as
# database name.
DATABASES['default']['NAME'] = 'manoseimas'
| <commit_before>from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
<commit_msg>Fix annoying database issue when running tests
Now database name is taken from 'read_default_file' parameter which points to a
my.cnf configuration, where database name should be defined.
But this does not work when running tests for a single file, database name is
set to 'test_', but when running all tests, then database name is set to
'test_manoseimas'.
Setting database name in settings explicitly fixes this.<commit_after>from manoseimas.settings.base import * # noqa
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
WEBPACK_LOADER.update({
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': os.path.join(BUILDOUT_DIR, 'webpack-stats.json'),
}
})
WEBPACK_COMMAND = ['npm', 'run', 'build:hot-reload']
# Database name should be taken from read_default_file, but that does not work
# with tests. Without database name running a single test gets 'test_' as
# database name.
DATABASES['default']['NAME'] = 'manoseimas'
|
cc93d6b9ade1d15236904978f012f91b0a9d567d | examples/manage.py | examples/manage.py | import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PASSWORD)
if __name__ == "__main__":
manager.run()
| import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# [from aiopg.sa import create_engine]
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_PASSWORD,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PORT[,
# create_engine])
if __name__ == "__main__":
manager.run()
| Update sqlalchemy command configuration example | Update sqlalchemy command configuration example
| Python | bsd-3-clause | rrader/aio_manager | import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PASSWORD)
if __name__ == "__main__":
manager.run()
Update sqlalchemy command configuration example | import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# [from aiopg.sa import create_engine]
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_PASSWORD,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PORT[,
# create_engine])
if __name__ == "__main__":
manager.run()
| <commit_before>import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PASSWORD)
if __name__ == "__main__":
manager.run()
<commit_msg>Update sqlalchemy command configuration example<commit_after> | import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# [from aiopg.sa import create_engine]
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_PASSWORD,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PORT[,
# create_engine])
if __name__ == "__main__":
manager.run()
| import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PASSWORD)
if __name__ == "__main__":
manager.run()
Update sqlalchemy command configuration exampleimport logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# [from aiopg.sa import create_engine]
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_PASSWORD,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PORT[,
# create_engine])
if __name__ == "__main__":
manager.run()
| <commit_before>import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PASSWORD)
if __name__ == "__main__":
manager.run()
<commit_msg>Update sqlalchemy command configuration example<commit_after>import logging
from aio_manager import Manager
from aioapp.app import build_application
logging.basicConfig(level=logging.WARNING)
app = build_application()
manager = Manager(app)
# To support SQLAlchemy commands, use this
#
# from aio_manager.commands.ext import sqlalchemy
# [from aiopg.sa import create_engine]
# sqlalchemy.configure_manager(manager, app, Base,
# settings.DATABASE_USERNAME,
# settings.DATABASE_PASSWORD,
# settings.DATABASE_NAME,
# settings.DATABASE_HOST,
# settings.DATABASE_PORT[,
# create_engine])
if __name__ == "__main__":
manager.run()
|
e6b3d51d44d834f434cea5a688f636eb912c067d | infernoshout/utils.py | infernoshout/utils.py | import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen=21):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
| import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
| Remove default buflen value in UniqueBuffer.__init__() | Remove default buflen value in UniqueBuffer.__init__()
| Python | unlicense | tsudoko/infernoshout-py | import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen=21):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
Remove default buflen value in UniqueBuffer.__init__() | import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
| <commit_before>import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen=21):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
<commit_msg>Remove default buflen value in UniqueBuffer.__init__()<commit_after> | import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
| import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen=21):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
Remove default buflen value in UniqueBuffer.__init__()import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
| <commit_before>import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen=21):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
<commit_msg>Remove default buflen value in UniqueBuffer.__init__()<commit_after>import collections
import http.cookies
import logging
class UniqueBuffer:
"""A simple deduplicating buffer. To add new items, manipulate self.items.
The actual buffer is not limited in length, the `buflen` argument is used
to specify the amount of items guaranteed to be unique."""
def __init__(self, buflen):
self.items = []
self.old = collections.deque(maxlen=buflen)
def pop_all(self):
"""Return all items and remove them from the buffer."""
ret = []
for i in self.items:
if i not in self.old:
ret.append(i)
self.old.append(i)
self.items = []
return ret
def atoi(string):
s = []
for i in string:
try:
int(i)
s.append(i)
except ValueError:
break
return int(''.join(s))
def dict_from_cookie_str(cookie_str):
c = http.cookies.SimpleCookie()
d = dict()
c.load(cookie_str)
for k, m in c.items():
d[k] = m.value
return d
|
0e8a17868731f459d15b754ac0d9cda5a4321a4a | tasks/check_einstein.py | tasks/check_einstein.py | import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
return result["size"] > 0
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
| import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
if result["size"] > 0:
self.return_values["has_einstein_perms"] = True
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
| Fix analytics template preflight check | Fix analytics template preflight check
Returning from _run_task didn't work, and using
```python
self.return_values = True
```
results in `return_values` being coerced to an empty dictionary that
evaluates as False.
| Python | bsd-3-clause | SalesforceFoundation/HEDAP,SalesforceFoundation/HEDAP,SalesforceFoundation/HEDAP | import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
return result["size"] > 0
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
Fix analytics template preflight check
Returning from _run_task didn't work, and using
```python
self.return_values = True
```
results in `return_values` being coerced to an empty dictionary that
evaluates as False. | import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
if result["size"] > 0:
self.return_values["has_einstein_perms"] = True
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
| <commit_before>import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
return result["size"] > 0
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
<commit_msg>Fix analytics template preflight check
Returning from _run_task didn't work, and using
```python
self.return_values = True
```
results in `return_values` being coerced to an empty dictionary that
evaluates as False.<commit_after> | import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
if result["size"] > 0:
self.return_values["has_einstein_perms"] = True
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
| import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
return result["size"] > 0
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
Fix analytics template preflight check
Returning from _run_task didn't work, and using
```python
self.return_values = True
```
results in `return_values` being coerced to an empty dictionary that
evaluates as False.import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
if result["size"] > 0:
self.return_values["has_einstein_perms"] = True
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
| <commit_before>import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
return result["size"] > 0
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
<commit_msg>Fix analytics template preflight check
Returning from _run_task didn't work, and using
```python
self.return_values = True
```
results in `return_values` being coerced to an empty dictionary that
evaluates as False.<commit_after>import json
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class CheckPermSetLicenses(BaseSalesforceApiTask):
task_options = {
"permission_sets": {
"description": "List of permission set names to check for, (ex: EinsteinAnalyticsUser)",
"required": True,
}
}
def _run_task(self):
query = self._get_query()
result = self.tooling.query(query)
if result["size"] > 0:
self.return_values["has_einstein_perms"] = True
def _get_query(self):
where_targets = [f"'{name}'" for name in self.options["permission_sets"]]
return f"""
SELECT Name FROM PermissionSet WHERE Name IN ({','.join(where_targets)})
"""
|
d3dbe3e9788c312c79d78d4292b0d2792605a3c4 | setup.py | setup.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.3',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.7',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
| Update dm.xmlsec.binding dependency to 1.3.7 | Update dm.xmlsec.binding dependency to 1.3.7
| Python | mit | onelogin/python-saml,onelogin/python-saml | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.3',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
Update dm.xmlsec.binding dependency to 1.3.7 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.7',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
| <commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.3',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
<commit_msg>Update dm.xmlsec.binding dependency to 1.3.7<commit_after> | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.7',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.3',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
Update dm.xmlsec.binding dependency to 1.3.7#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.7',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
| <commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.3',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
<commit_msg>Update dm.xmlsec.binding dependency to 1.3.7<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2018 OneLogin, Inc.
# MIT License
from setuptools import setup
setup(
name='python-saml',
version='2.4.1',
description='Onelogin Python Toolkit. Add SAML support to your Python software using this library',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='OneLogin',
author_email='support@onelogin.com',
license='MIT',
url='https://github.com/onelogin/python-saml',
packages=['onelogin', 'onelogin/saml2'],
include_package_data=True,
package_data={
'onelogin/saml2/schemas': ['*.xsd'],
},
package_dir={
'': 'src',
},
test_suite='tests',
install_requires=[
'dm.xmlsec.binding==1.3.7',
'isodate>=0.5.0',
'defusedxml>=0.4.1',
],
extras_require={
'test': (
'coverage>=3.6',
'freezegun==0.3.5',
'pylint==1.9.1',
'pep8==1.5.7',
'pyflakes==0.8.1',
'coveralls==1.1',
),
},
keywords='saml saml2 xmlsec django flask',
)
|
4af38a7f96199e31b3e37a04bb630d307399aed7 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| Update future requirement from <0.18,>=0.16 to >=0.16,<0.19 | Update future requirement from <0.18,>=0.16 to >=0.16,<0.19
Updates the requirements on [future](https://github.com/PythonCharmers/python-future) to permit the latest version.
- [Release notes](https://github.com/PythonCharmers/python-future/releases)
- [Changelog](https://github.com/PythonCharmers/python-future/blob/master/docs/changelog.rst)
- [Commits](https://github.com/PythonCharmers/python-future/commits)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> | Python | apache-2.0 | zooniverse/panoptes-python-client | from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
Update future requirement from <0.18,>=0.16 to >=0.16,<0.19
Updates the requirements on [future](https://github.com/PythonCharmers/python-future) to permit the latest version.
- [Release notes](https://github.com/PythonCharmers/python-future/releases)
- [Changelog](https://github.com/PythonCharmers/python-future/blob/master/docs/changelog.rst)
- [Commits](https://github.com/PythonCharmers/python-future/commits)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> | from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
<commit_msg>Update future requirement from <0.18,>=0.16 to >=0.16,<0.19
Updates the requirements on [future](https://github.com/PythonCharmers/python-future) to permit the latest version.
- [Release notes](https://github.com/PythonCharmers/python-future/releases)
- [Changelog](https://github.com/PythonCharmers/python-future/blob/master/docs/changelog.rst)
- [Commits](https://github.com/PythonCharmers/python-future/commits)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com><commit_after> | from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
Update future requirement from <0.18,>=0.16 to >=0.16,<0.19
Updates the requirements on [future](https://github.com/PythonCharmers/python-future) to permit the latest version.
- [Release notes](https://github.com/PythonCharmers/python-future/releases)
- [Changelog](https://github.com/PythonCharmers/python-future/blob/master/docs/changelog.rst)
- [Commits](https://github.com/PythonCharmers/python-future/commits)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
<commit_msg>Update future requirement from <0.18,>=0.16 to >=0.16,<0.19
Updates the requirements on [future](https://github.com/PythonCharmers/python-future) to permit the latest version.
- [Release notes](https://github.com/PythonCharmers/python-future/releases)
- [Changelog](https://github.com/PythonCharmers/python-future/blob/master/docs/changelog.rst)
- [Commits](https://github.com/PythonCharmers/python-future/commits)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com><commit_after>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.1.1',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.23',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<3.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
5ed65dbf5541fafbe2dca389a1fdda1c01c229d8 | setup.py | setup.py | import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.06'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
| import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.07'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
| Update stratisd-client-dbus requirement to 0.07 | Update stratisd-client-dbus requirement to 0.07
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>
| Python | apache-2.0 | stratis-storage/stratis-cli,stratis-storage/stratis-cli | import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.06'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
Update stratisd-client-dbus requirement to 0.07
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com> | import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.07'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
| <commit_before>import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.06'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
<commit_msg>Update stratisd-client-dbus requirement to 0.07
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com><commit_after> | import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.07'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
| import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.06'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
Update stratisd-client-dbus requirement to 0.07
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.07'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
| <commit_before>import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.06'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
<commit_msg>Update stratisd-client-dbus requirement to 0.07
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com><commit_after>import os
import sys
import setuptools
if sys.version_info[0] < 3:
from codecs import open
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
README = local_file("README.rst")
with open(local_file("src/stratis_cli/_version.py")) as o:
exec(o.read())
setuptools.setup(
name='stratis-cli',
version=__version__,
author='Anne Mulhern',
author_email='amulhern@redhat.com',
description='prototype stratis cli',
long_description=open(README, encoding='utf-8').read(),
platforms=['Linux'],
license='Apache 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Filesystems',
'Topic :: Systems Administration'
],
install_requires = [
'stratisd-client-dbus>=0.07'
],
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
scripts=['bin/stratis']
)
|
04229ec331c0259a07f5d22a636b65aac60407f8 | setup.py | setup.py | from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main() | from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main() | Add Trove classifiers for 2.x and 3.x | Add Trove classifiers for 2.x and 3.x
| Python | mit | six8/logging-levels | from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main()Add Trove classifiers for 2.x and 3.x | from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main() | <commit_before>from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main()<commit_msg>Add Trove classifiers for 2.x and 3.x<commit_after> | from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main() | from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main()Add Trove classifiers for 2.x and 3.xfrom distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main() | <commit_before>from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main()<commit_msg>Add Trove classifiers for 2.x and 3.x<commit_after>from distutils.core import setup
def main():
setup(
name = 'logging_levels',
packages=['logging_levels'],
package_dir = {'logging_levels':'logging_levels'},
version = open('VERSION.txt').read().strip(),
author='Mike Thornton',
author_email='six8@devdetails.com',
url='https://github.com/six8/logging-levels',
download_url='https://github.com/six8/logging-levels',
keywords=['logging'],
license='MIT',
description="Add logging levels for when DEBUG just isn't enough.",
classifiers = [
"Programming Language :: Python",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=open('README.rst').read(),
)
if __name__ == '__main__':
main() |
fdf68c774244c86d061bb095fdbddb943c4c653d | setup.py | setup.py | from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
) | from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1.0',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
) | Make version compliant with pip | Make version compliant with pip
| Python | mit | LumenResearch/heatmappy | from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
)Make version compliant with pip | from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1.0',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
) | <commit_before>from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
)<commit_msg>Make version compliant with pip<commit_after> | from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1.0',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
) | from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
)Make version compliant with pipfrom setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1.0',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
) | <commit_before>from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
)<commit_msg>Make version compliant with pip<commit_after>from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name = 'heatmappy',
packages = ['heatmappy'],
version = '0.1.0',
description = 'Draw image heatmaps in python',
author = 'Lumen Research',
author_email = 'development@lumen-research.com',
url = 'https://github.com/LumenResearch/heatmappy',
download_url = 'https://github.com/LumenResearch/heatmappy/tarball/0.1.1',
keywords = ['image', 'heatmap', 'heat map'],
install_requires=required,
classifiers = [
'Programming Language :: Python :: 3'
],
include_package_data=True,
) |
447beba3ab1907c68514cb9a79f99fbf5568e9bf | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.3',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
| #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.4',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
| Bump pandas from 1.1.3 to 1.1.4 | Bump pandas from 1.1.3 to 1.1.4
Bumps [pandas](https://github.com/pandas-dev/pandas) from 1.1.3 to 1.1.4.
- [Release notes](https://github.com/pandas-dev/pandas/releases)
- [Changelog](https://github.com/pandas-dev/pandas/blob/master/RELEASE.md)
- [Commits](https://github.com/pandas-dev/pandas/compare/v1.1.3...v1.1.4)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> | Python | apache-2.0 | bugra/l1 | #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.3',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
Bump pandas from 1.1.3 to 1.1.4
Bumps [pandas](https://github.com/pandas-dev/pandas) from 1.1.3 to 1.1.4.
- [Release notes](https://github.com/pandas-dev/pandas/releases)
- [Changelog](https://github.com/pandas-dev/pandas/blob/master/RELEASE.md)
- [Commits](https://github.com/pandas-dev/pandas/compare/v1.1.3...v1.1.4)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> | #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.4',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.3',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
<commit_msg>Bump pandas from 1.1.3 to 1.1.4
Bumps [pandas](https://github.com/pandas-dev/pandas) from 1.1.3 to 1.1.4.
- [Release notes](https://github.com/pandas-dev/pandas/releases)
- [Changelog](https://github.com/pandas-dev/pandas/blob/master/RELEASE.md)
- [Commits](https://github.com/pandas-dev/pandas/compare/v1.1.3...v1.1.4)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com><commit_after> | #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.4',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
| #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.3',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
Bump pandas from 1.1.3 to 1.1.4
Bumps [pandas](https://github.com/pandas-dev/pandas) from 1.1.3 to 1.1.4.
- [Release notes](https://github.com/pandas-dev/pandas/releases)
- [Changelog](https://github.com/pandas-dev/pandas/blob/master/RELEASE.md)
- [Commits](https://github.com/pandas-dev/pandas/compare/v1.1.3...v1.1.4)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com>#!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.4',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.3',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
<commit_msg>Bump pandas from 1.1.3 to 1.1.4
Bumps [pandas](https://github.com/pandas-dev/pandas) from 1.1.3 to 1.1.4.
- [Release notes](https://github.com/pandas-dev/pandas/releases)
- [Changelog](https://github.com/pandas-dev/pandas/blob/master/RELEASE.md)
- [Commits](https://github.com/pandas-dev/pandas/compare/v1.1.3...v1.1.4)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com><commit_after>#!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.1.4',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.1',
]
)
|
3c8099a8fd577f825462ff4b23e47700af26f3d1 | setup.py | setup.py | import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
| import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
description='Taurus Tool for Continuous Testing',
author='Andrey Pokhilko',
author_email='andrey@blazemeter.com',
url='https://github.com/Blazemeter/taurus/',
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
| Update meta-info, first public package exposed | Update meta-info, first public package exposed
| Python | apache-2.0 | Nefry/taurus,arthurlogilab/taurus,greyfenrir/taurus,Blazemeter/taurus,Blazemeter/taurus,Blazemeter/taurus,Nefry/taurus,Nefry/taurus,arthurlogilab/taurus,Blazemeter/taurus,greyfenrir/taurus,greyfenrir/taurus,arthurlogilab/taurus,itaymendel/taurus,Nefry/taurus,itaymendel/taurus,arthurlogilab/taurus,greyfenrir/taurus,Blazemeter/taurus,greyfenrir/taurus,greyfenrir/taurus,Blazemeter/taurus,itaymendel/taurus,Blazemeter/taurus,arthurlogilab/taurus,greyfenrir/taurus,Blazemeter/taurus,itaymendel/taurus,itaymendel/taurus,greyfenrir/taurus,Blazemeter/taurus,greyfenrir/taurus,Nefry/taurus | import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
Update meta-info, first public package exposed | import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
description='Taurus Tool for Continuous Testing',
author='Andrey Pokhilko',
author_email='andrey@blazemeter.com',
url='https://github.com/Blazemeter/taurus/',
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
| <commit_before>import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
<commit_msg>Update meta-info, first public package exposed<commit_after> | import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
description='Taurus Tool for Continuous Testing',
author='Andrey Pokhilko',
author_email='andrey@blazemeter.com',
url='https://github.com/Blazemeter/taurus/',
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
| import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
Update meta-info, first public package exposedimport os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
description='Taurus Tool for Continuous Testing',
author='Andrey Pokhilko',
author_email='andrey@blazemeter.com',
url='https://github.com/Blazemeter/taurus/',
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
| <commit_before>import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
<commit_msg>Update meta-info, first public package exposed<commit_after>import os
import shutil
import sys
from setuptools import setup
from setuptools.command.install import install
import bzt
class InstallWithHook(install, object):
"""
Command adding post-install hook to setup
"""
def run(self):
"""
Do the command's job!
"""
install.run(self)
self.__hook()
def __hook(self):
dirname = os.getenv("VIRTUAL_ENV", "") + os.path.sep + "etc" + os.path.sep + "bzt.d"
sys.stdout.write("Creating %s" % dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
src = os.path.dirname(__file__)
src += os.path.sep + "bzt" + os.path.sep + "10-base.json"
sys.stdout.write("Copying %s to %s" % (src, dirname))
shutil.copy(src, dirname + os.path.sep)
setup(
name="bzt",
version=bzt.version,
description='Taurus Tool for Continuous Testing',
author='Andrey Pokhilko',
author_email='andrey@blazemeter.com',
url='https://github.com/Blazemeter/taurus/',
install_requires=[
'pyyaml', 'psutil', 'colorlog', 'lxml', 'cssselect', 'urwid'
],
packages=['bzt', 'bzt.modules'],
entry_points={
'console_scripts': [
'bzt=bzt.cli:main',
],
},
package_data={
"bzt": []
},
cmdclass=dict(install=InstallWithHook)
)
|
c681b423dfbad1a7ef2f1a85f98f3ea278424f56 | setup.py | setup.py | from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
| from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'long_description': '\n' + open('README.md').read(),
'url': 'https://github.com/lauft/timew-report.git',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
| Update long description and url | Update long description and url
| Python | mit | lauft/timew-report | from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
Update long description and url | from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'long_description': '\n' + open('README.md').read(),
'url': 'https://github.com/lauft/timew-report.git',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
| <commit_before>from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
<commit_msg>Update long description and url<commit_after> | from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'long_description': '\n' + open('README.md').read(),
'url': 'https://github.com/lauft/timew-report.git',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
| from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
Update long description and urlfrom setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'long_description': '\n' + open('README.md').read(),
'url': 'https://github.com/lauft/timew-report.git',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
| <commit_before>from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
<commit_msg>Update long description and url<commit_after>from setuptools import setup
config = {
'name': 'timew-report',
'version': '0.0.0',
'description': 'An interface for TimeWarrior report data',
'long_description': '\n' + open('README.md').read(),
'url': 'https://github.com/lauft/timew-report.git',
'author': 'Thomas Lauf',
'author_email': 'Thomas.Lauf@tngtech.com',
'license': 'MIT License',
'classifiers': [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
'keywords': 'timewarrior taskwarrior time-tracking',
'packages': ['timewreport'],
'install_requires': ['python-dateutil'],
}
setup(**config)
|
d11e752bc6b3c573600ca916f4737e80a5a18bea | setup.py | setup.py | #! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.0.1'
]
)
| #! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.1.1'
]
)
| Set premailer version to 3.1.1 | Set premailer version to 3.1.1 | Python | mit | schnapptack/djpl-emailing,schnapptack/djpl-emailing | #! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.0.1'
]
)
Set premailer version to 3.1.1 | #! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.1.1'
]
)
| <commit_before>#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.0.1'
]
)
<commit_msg>Set premailer version to 3.1.1<commit_after> | #! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.1.1'
]
)
| #! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.0.1'
]
)
Set premailer version to 3.1.1#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.1.1'
]
)
| <commit_before>#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.0.1'
]
)
<commit_msg>Set premailer version to 3.1.1<commit_after>#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-emailing',
version='0.1',
description='a django-productline feature to include schnipp.js',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, email',
author='Toni Michel',
author_email='toni@schnapptack.de',
url="https://github.com/tonimichel/djpl-emailing",
packages=find_packages(),
package_dir={'emailing': 'emailing'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline', 'premailer==3.1.1'
]
)
|
9e03ae0a7db5e98c8ee95e930c983d37442581c3 | setup.py | setup.py | from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
| from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
| Add scikit-image as a dep | Add scikit-image as a dep
| Python | apache-2.0 | widoptimization-willett/feature-extraction | from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
Add scikit-image as a dep | from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
| <commit_before>from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
<commit_msg>Add scikit-image as a dep<commit_after> | from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
| from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
Add scikit-image as a depfrom setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
| <commit_before>from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
<commit_msg>Add scikit-image as a dep<commit_after>from setuptools import setup
setup(
name='feature-extraction',
author='Liam Marshall',
author_email='limarshall@wisc.edu',
version='0.1',
license='Apache',
packages=['feature_extraction'],
install_requires=['numpy', 'Pillow', 'Click', 'scikit-image'],
entry_points='''
[console_scripts]
extract_features=feature_extraction.cli:extract_features
''',
)
|
517742cbf787ad7ac09a518c34307ac3c2e561ba | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.1dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
| from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.2dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
package_data = {'rawdisk.plugins.filesystems' : ['*.yapsy-plugin']},
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
| Make sure plugin configuration files are included | Make sure plugin configuration files are included
| Python | bsd-3-clause | dariusbakunas/rawdisk | from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.1dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
Make sure plugin configuration files are included | from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.2dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
package_data = {'rawdisk.plugins.filesystems' : ['*.yapsy-plugin']},
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.1dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
<commit_msg>Make sure plugin configuration files are included<commit_after> | from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.2dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
package_data = {'rawdisk.plugins.filesystems' : ['*.yapsy-plugin']},
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
| from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.1dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
Make sure plugin configuration files are includedfrom setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.2dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
package_data = {'rawdisk.plugins.filesystems' : ['*.yapsy-plugin']},
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.1dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
<commit_msg>Make sure plugin configuration files are included<commit_after>from setuptools import setup, find_packages
setup(
name='rawdisk',
author='D. Bakunas',
version='0.2dev',
description='Experimental python code to learn different disk formats',
packages=find_packages(),
package_data = {'rawdisk.plugins.filesystems' : ['*.yapsy-plugin']},
license='LICENSE.txt',
long_description=open('README.md').read(),
install_requires=[
'hexdump >= 2.0',
'hurry.filesize >= 0.9',
'yapsy >= 1.10.323'
],
entry_points={
'console_scripts': [
'rawdisk = rawdisk.main:main',
]
}
)
|
5f35af0e5ee1d0082978f2a3ae2ccd78a0ee33e3 | setup.py | setup.py | from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
| import io
from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with io.open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
| Use io.open to support Python 2 | Use io.open to support Python 2 | Python | mit | manrajgrover/halo,ManrajGrover/halo | from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
Use io.open to support Python 2 | import io
from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with io.open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
| <commit_before>from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
<commit_msg>Use io.open to support Python 2<commit_after> | import io
from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with io.open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
| from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
Use io.open to support Python 2import io
from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with io.open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
| <commit_before>from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
<commit_msg>Use io.open to support Python 2<commit_after>import io
from setuptools import setup, find_packages # pylint: disable=no-name-in-module,import-error
def dependencies(file):
with open(file) as f:
return f.read().splitlines()
with io.open("README.md", encoding='utf-8') as infile:
long_description = infile.read()
setup(
name='halo',
packages=find_packages(exclude=('tests', 'examples')),
version='0.0.23',
license='MIT',
description='Beautiful terminal spinners in Python',
long_description=long_description,
long_description_content_type="text/markdown",
author='Manraj Singh',
author_email='manrajsinghgrover@gmail.com',
url='https://github.com/manrajgrover/halo',
keywords=[
"console",
"loading",
"indicator",
"progress",
"cli",
"spinner",
"spinners",
"terminal",
"term",
"busy",
"wait",
"idle"
],
install_requires=dependencies('requirements.txt'),
tests_require=dependencies('requirements-dev.txt'),
include_package_data=True,
extras_require={
'ipython': [
'IPython==5.7.0',
'ipywidgets==7.1.0',
]
}
)
|
8b9b25dcc0b906d70dd632898146ffaad0bc57fb | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.1.0',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
],
}
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.0.1a0.dev5',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'napalm-logs=napalm_logs.scripts.napalm_logs'
],
}
)
| Set dev version and link CLI script | Set dev version and link CLI script
| Python | apache-2.0 | napalm-automation/napalm-logs,napalm-automation/napalm-logs | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.1.0',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
],
}
)
Set dev version and link CLI script | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.0.1a0.dev5',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'napalm-logs=napalm_logs.scripts.napalm_logs'
],
}
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.1.0',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
],
}
)
<commit_msg>Set dev version and link CLI script<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.0.1a0.dev5',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'napalm-logs=napalm_logs.scripts.napalm_logs'
],
}
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.1.0',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
],
}
)
Set dev version and link CLI script#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.0.1a0.dev5',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'napalm-logs=napalm_logs.scripts.napalm_logs'
],
}
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.1.0',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
],
}
)
<commit_msg>Set dev version and link CLI script<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
The setup script for napalm-logs
'''
import uuid
from setuptools import setup, find_packages
from pip.req import parse_requirements
__author__ = 'Mircea Ulinic <mircea.ulinic@gmail.com>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='napalm-logs',
version='0.0.1a0.dev5',
packages=find_packages(),
author='Mircea Ulinic',
author_email='mircea.ulinic@gmail.com',
description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser',
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url='https://github.com/napalm-automation/napalm-logs',
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'napalm-logs=napalm_logs.scripts.napalm_logs'
],
}
)
|
d3c59843fad8671b8e262b29b83d4f2a66bfe493 | setup.py | setup.py | from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.6',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.4',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
| from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.7',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.7',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
| Fix download_url, ver bump to coordinate | Fix download_url, ver bump to coordinate
| Python | mit | rfarley3/Kibana | from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.6',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.4',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
Fix download_url, ver bump to coordinate | from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.7',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.7',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
| <commit_before>from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.6',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.4',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
<commit_msg>Fix download_url, ver bump to coordinate<commit_after> | from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.7',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.7',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
| from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.6',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.4',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
Fix download_url, ver bump to coordinatefrom setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.7',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.7',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
| <commit_before>from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.6',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.4',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
<commit_msg>Fix download_url, ver bump to coordinate<commit_after>from setuptools import setup
setup(
name='kibana',
packages=['kibana'],
version='0.7',
description='Kibana configuration index (.kibana) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
url='https://github.com/rfarley3/Kibana',
download_url='https://github.com/rfarley3/Kibana/tarball/0.7',
keywords=['kibana', 'config', 'import', 'export', 'mappings'],
classifiers=[],
install_requires=(
'elasticsearch',
'argparse',
'requests',
),
entry_points={
'console_scripts': [
'dotkibana = kibana.__main__:main',
]
},
)
|
e45f056e92c7e5b2956072f7f2985ce1f7d213ed | setup.py | setup.py | from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
},
zip_safe=False
)
| from setuptools import setup
console_scripts = [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': console_scripts
},
zip_safe=False
)
| FIX moved bio script to misc.bio_utils | FIX moved bio script to misc.bio_utils
| Python | mit | dieterich-lab/riboseq-utils | from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
},
zip_safe=False
)
FIX moved bio script to misc.bio_utils | from setuptools import setup
console_scripts = [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': console_scripts
},
zip_safe=False
)
| <commit_before>from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
},
zip_safe=False
)
<commit_msg>FIX moved bio script to misc.bio_utils<commit_after> | from setuptools import setup
console_scripts = [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': console_scripts
},
zip_safe=False
)
| from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
},
zip_safe=False
)
FIX moved bio script to misc.bio_utilsfrom setuptools import setup
console_scripts = [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': console_scripts
},
zip_safe=False
)
| <commit_before>from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
},
zip_safe=False
)
<commit_msg>FIX moved bio script to misc.bio_utils<commit_after>from setuptools import setup
console_scripts = [
'extract-metagene-profiles=riboutils.extract_metagene_profiles:main',
'estimate-metagene-profile-bayes-factors=riboutils.estimate_metagene_profile_bayes_factors:main',
'select-periodic-offsets=riboutils.select_periodic_offsets:main'
]
def readme():
with open('README.md') as f:
return f.read()
setup(name='riboutils',
version='0.1',
description="This package contains utilities for other ribosome profiling projects.",
long_description=readme(),
keywords="ribosome profiling utilities translation",
url="",
author="Brandon Malone",
author_email="bmmalone@gmail.com",
license='MIT',
packages=['riboutils'],
install_requires = [
'numpy',
'pandas',
'scipy',
'tqdm',
'appdirs',
'statsmodels',
'misc[bio]'
],
extras_require = {
},
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
entry_points = {
'console_scripts': console_scripts
},
zip_safe=False
)
|
01d2d7abbb0ba949760af6eb70a7b101df09a50a | setup.py | setup.py | # import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-nose',
'south',
'mock',
'django-dynamic-fixture'
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
| # import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-dynamic-fixture',
'django-nose',
'freezegun',
'mock',
'south',
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
| Add freezegun requirement for tests. | Add freezegun requirement for tests.
| Python | mit | ambitioninc/django-entity-emailer,ambitioninc/django-entity-emailer,wesleykendall/django-entity-emailer,wesleykendall/django-entity-emailer | # import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-nose',
'south',
'mock',
'django-dynamic-fixture'
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
Add freezegun requirement for tests. | # import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-dynamic-fixture',
'django-nose',
'freezegun',
'mock',
'south',
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
| <commit_before># import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-nose',
'south',
'mock',
'django-dynamic-fixture'
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
<commit_msg>Add freezegun requirement for tests.<commit_after> | # import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-dynamic-fixture',
'django-nose',
'freezegun',
'mock',
'south',
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
| # import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-nose',
'south',
'mock',
'django-dynamic-fixture'
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
Add freezegun requirement for tests.# import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-dynamic-fixture',
'django-nose',
'freezegun',
'mock',
'south',
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
| <commit_before># import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-nose',
'south',
'mock',
'django-dynamic-fixture'
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
<commit_msg>Add freezegun requirement for tests.<commit_after># import multiprocessing to avoid this bug (http://bugs.python.org/issue15881#msg170215)
import multiprocessing
assert multiprocessing
import re
from setuptools import setup, find_packages
def get_version():
"""
Extracts the version number from the version.py file.
"""
VERSION_FILE = 'entity_emailer/version.py'
mo = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', open(VERSION_FILE, 'rt').read(), re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError('Unable to find version string in {0}.'.format(VERSION_FILE))
setup(
name='',
version=get_version(),
description='',
long_description=open('README.md').read(),
url='',
author='',
author_email='opensource@ambition.com',
keywords='',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
],
license='MIT',
install_requires=[
'django>=1.6',
'django-entity>=0.4.1',
'celery>=3.1',
],
tests_require=[
'django-dynamic-fixture',
'django-nose',
'freezegun',
'mock',
'south',
],
test_suite='run_tests.run_tests',
include_package_data=True,
)
|
e99aa7ecb281675134efd0ff8a0cd9dfb53731b0 | setup.py | setup.py | from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc2",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
| from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc3",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
| Increment rc version to rc3 | Increment rc version to rc3 | Python | bsd-3-clause | Eugeny/gevent-socketio,gutomaia/gevent-socketio,smurfix/gevent-socketio,bobvandevijver/gevent-socketio,arnuschky/gevent-socketio,kazmiruk/gevent-socketio,gutomaia/gevent-socketio,arnuschky/gevent-socketio,abourget/gevent-socketio,smurfix/gevent-socketio,gutomaia/gevent-socketio,theskumar-archive/gevent-socketio,yacneyac/gevent-socketio,kazmiruk/gevent-socketio,theskumar-archive/gevent-socketio,smurfix/gevent-socketio,abourget/gevent-socketio,bobvandevijver/gevent-socketio,hzruandd/gevent-socketio,hzruandd/gevent-socketio,yacneyac/gevent-socketio,Eugeny/gevent-socketio | from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc2",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
Increment rc version to rc3 | from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc3",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
| <commit_before>from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc2",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
<commit_msg>Increment rc version to rc3<commit_after> | from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc3",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
| from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc2",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
Increment rc version to rc3from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc3",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
| <commit_before>from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc2",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
<commit_msg>Increment rc version to rc3<commit_after>from setuptools import setup
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
pytest.main(self.test_args)
setup(
name="gevent-socketio",
version="0.3.5-rc3",
description=(
"SocketIO server based on the Gevent pywsgi server, "
"a Python network library"),
author="Jeffrey Gelens",
author_email="jeffrey@noppo.pro",
maintainer="Alexandre Bourget",
maintainer_email="alex@bourget.cc",
license="BSD",
url="https://github.com/abourget/gevent-socketio",
download_url="https://github.com/abourget/gevent-socketio",
install_requires=("gevent-websocket",),
setup_requires=('versiontools >= 1.7'),
cmdclass = {'test': PyTest},
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=["examples", "tests"]),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
"Intended Audience :: Developers",
],
)
|
0c76289835e31b6f92e550745002730e1e7efaf2 | setup.py | setup.py | from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.10.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
| from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.13.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
| Update the numpy requirement since we use numpy.isin, which is only available in numpy 1.13.0 onwards. | Update the numpy requirement since we use numpy.isin, which is only available in numpy 1.13.0 onwards.
| Python | mit | pwcazenave/PyFVCOM | from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.10.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
Update the numpy requirement since we use numpy.isin, which is only available in numpy 1.13.0 onwards. | from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.13.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
| <commit_before>from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.10.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
<commit_msg>Update the numpy requirement since we use numpy.isin, which is only available in numpy 1.13.0 onwards.<commit_after> | from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.13.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
| from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.10.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
Update the numpy requirement since we use numpy.isin, which is only available in numpy 1.13.0 onwards.from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.13.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
| <commit_before>from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.10.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
<commit_msg>Update the numpy requirement since we use numpy.isin, which is only available in numpy 1.13.0 onwards.<commit_after>from setuptools import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '2.1.0',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy>=1.13.0', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide', 'pandas', 'cmocean'],
classifiers = []
)
|
ccccb43aafd87bf77961dfe1fb25e00c63255cd1 | setup.py | setup.py | #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
| #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
| Update supported Python and Django versions | Update supported Python and Django versions
| Python | bsd-3-clause | blancltd/blanc-contentfiles | #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
Update supported Python and Django versions | #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
| <commit_before>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
<commit_msg>Update supported Python and Django versions<commit_after> | #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
| #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
Update supported Python and Django versions#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
| <commit_before>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
<commit_msg>Update supported Python and Django versions<commit_after>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
|
0ce6ea2ca75c3839b0a1e41f0fa32e5a9816f653 | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.3',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
],
)
| #!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
'facebook_auth.management',
'facebook_auth.management.commands',
],
)
| Add management commands to package. | Add management commands to package.
Change-Id: I6c35981fbe47639e72066ddd802eb4d4d4d2d4a0
Reviewed-on: http://review.pozytywnie.pl:8080/19737
Reviewed-by: Jan <14e793d896ddc8ca6911747228e86464cf420065@pozytywnie.pl>
Tested-by: Jenkins
| Python | mit | jgoclawski/django-facebook-auth,jgoclawski/django-facebook-auth,pozytywnie/django-facebook-auth,pozytywnie/django-facebook-auth | #!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.3',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
],
)
Add management commands to package.
Change-Id: I6c35981fbe47639e72066ddd802eb4d4d4d2d4a0
Reviewed-on: http://review.pozytywnie.pl:8080/19737
Reviewed-by: Jan <14e793d896ddc8ca6911747228e86464cf420065@pozytywnie.pl>
Tested-by: Jenkins | #!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
'facebook_auth.management',
'facebook_auth.management.commands',
],
)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.3',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
],
)
<commit_msg>Add management commands to package.
Change-Id: I6c35981fbe47639e72066ddd802eb4d4d4d2d4a0
Reviewed-on: http://review.pozytywnie.pl:8080/19737
Reviewed-by: Jan <14e793d896ddc8ca6911747228e86464cf420065@pozytywnie.pl>
Tested-by: Jenkins<commit_after> | #!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
'facebook_auth.management',
'facebook_auth.management.commands',
],
)
| #!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.3',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
],
)
Add management commands to package.
Change-Id: I6c35981fbe47639e72066ddd802eb4d4d4d2d4a0
Reviewed-on: http://review.pozytywnie.pl:8080/19737
Reviewed-by: Jan <14e793d896ddc8ca6911747228e86464cf420065@pozytywnie.pl>
Tested-by: Jenkins#!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
'facebook_auth.management',
'facebook_auth.management.commands',
],
)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.3',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
],
)
<commit_msg>Add management commands to package.
Change-Id: I6c35981fbe47639e72066ddd802eb4d4d4d2d4a0
Reviewed-on: http://review.pozytywnie.pl:8080/19737
Reviewed-by: Jan <14e793d896ddc8ca6911747228e86464cf420065@pozytywnie.pl>
Tested-by: Jenkins<commit_after>#!/usr/bin/env python
from distutils.core import setup
def read(name):
from os import path
return open(path.join(path.dirname(__file__), name)).read()
setup(
name='django-facebook-auth',
version='3.3.4',
description="Authorisation app for Facebook API.",
long_description=read("README.rst"),
maintainer="Tomasz Wysocki",
maintainer_email="tomasz@wysocki.info",
install_requires=(
'celery',
'django<1.6',
'facepy',
),
packages=[
'facebook_auth',
'facebook_auth.migrations',
'facebook_auth.management',
'facebook_auth.management.commands',
],
)
|
b7a9e79a80d1be827c803308c0abd651920c0b83 | setup.py | setup.py | import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.1.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.2.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| Set the version to 1.2.0 | Set the version to 1.2.0
| Python | mit | xgfone/xutils,xgfone/pycom | import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.1.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
Set the version to 1.2.0 | import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.2.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| <commit_before>import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.1.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
<commit_msg>Set the version to 1.2.0<commit_after> | import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.2.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.1.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
Set the version to 1.2.0import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.2.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
| <commit_before>import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.1.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
<commit_msg>Set the version to 1.2.0<commit_after>import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.2.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="xgfone@126.com",
maintainer="xgfone",
maintainer_email="xgfone@126.com",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
b6c743d9b3d94b9875b5475f7f567fe15c5cb024 | setup.py | setup.py | from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4', 'ReviewBoard>=2.0.3'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
| from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
| Remove reviewboard requirement; can cause unexpected updates | Remove reviewboard requirement; can cause unexpected updates
| Python | mit | joshguffin/rboard_bugzilla | from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4', 'ReviewBoard>=2.0.3'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
Remove reviewboard requirement; can cause unexpected updates | from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
| <commit_before>from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4', 'ReviewBoard>=2.0.3'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
<commit_msg>Remove reviewboard requirement; can cause unexpected updates<commit_after> | from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
| from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4', 'ReviewBoard>=2.0.3'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
Remove reviewboard requirement; can cause unexpected updatesfrom reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
| <commit_before>from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4', 'ReviewBoard>=2.0.3'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
<commit_msg>Remove reviewboard requirement; can cause unexpected updates<commit_after>from reviewboard.extensions.packaging import setup
GITHUB_URL = 'https://github.com/joshguffin/rboard_bugzilla'
VERSION = "0.1"
PACKAGE = 'rboard_bugzilla'
EXTENSION = '{0} = {0}.extension:BugzillaExtension'.format(PACKAGE),
setup(
name=PACKAGE,
version=VERSION,
description='Post to BZs',
url=GITHUB_URL,
author='Josh Guffin',
author_email='josh.guffin@gmail.com',
packages=[PACKAGE],
install_requires=['bugzillatools>=0.4'],
entry_points={'reviewboard.extensions': [EXTENSION]}
)
|
bec98cca8a765743cf990f5807f5d52b95dd5d9e | setup.py | setup.py | #!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
| #!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
'debugtools',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
| Add debugtools as a dependency. | Add debugtools as a dependency.
| Python | mit | kxgames/glooey,kxgames/glooey | #!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
Add debugtools as a dependency. | #!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
'debugtools',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
| <commit_before>#!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
<commit_msg>Add debugtools as a dependency.<commit_after> | #!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
'debugtools',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
| #!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
Add debugtools as a dependency.#!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
'debugtools',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
| <commit_before>#!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
<commit_msg>Add debugtools as a dependency.<commit_after>#!/usr/bin/env python3
# encoding: utf-8
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import re
with open('glooey/__init__.py') as file:
version_pattern = re.compile("__version__ = '(.*)'")
version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
readme = file.read()
setup(
name='glooey',
version=version,
author='Kale Kundert',
author_email='kale@thekunderts.net',
description='An object-oriented GUI library for pyglet.',
long_description=readme,
url='https://github.com/kxgames/glooey',
packages=[
'glooey',
],
include_package_data=True,
install_requires=[
'pyglet',
'more_itertools',
'vecrec',
'autoprop',
'debugtools',
],
license='MIT',
zip_safe=False,
keywords=[
'glooey',
'pyglet',
'gui',
'library',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Topic :: Games/Entertainment',
'Topic :: Software Development :: User Interfaces',
'Topic :: Software Development :: Libraries',
],
)
|
ae6911c3be172d2d163ddbfb27e90e97b7c61684 | setup.py | setup.py | from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
| from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
| Change version from 1.0 to 1.0.0 | Change version from 1.0 to 1.0.0
| Python | mit | ZedThree/jupyter-fortran-kernel | from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
Change version from 1.0 to 1.0.0 | from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
| <commit_before>from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
<commit_msg>Change version from 1.0 to 1.0.0<commit_after> | from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
| from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
Change version from 1.0 to 1.0.0from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
| <commit_before>from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
<commit_msg>Change version from 1.0 to 1.0.0<commit_after>from distutils.core import setup
setup(name='jupyter_c_kernel',
version='1.0.0',
description='Minimalistic C kernel for Jupyter',
author='Brendan Rius',
author_email='ping@brendan-rius.com',
packages=['jupyter_c_kernel'],
keywords=['jupyter', 'kernel', 'c']
)
|
7947136954606b91ebf18872b24c50fef6d0c975 | setup.py | setup.py | import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
| import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
| Add license to trove classifiers. | Add license to trove classifiers. | Python | mit | AndrewIngram/django-extra-views,AndrewIngram/django-extra-views | import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
Add license to trove classifiers. | import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
| <commit_before>import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
<commit_msg>Add license to trove classifiers.<commit_after> | import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
| import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
Add license to trove classifiers.import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
| <commit_before>import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
<commit_msg>Add license to trove classifiers.<commit_after>import re
from setuptools import setup
# get version without importing
with open("extra_views/__init__.py", "rb") as f:
VERSION = str(re.search('__version__ = "(.+?)"', f.read().decode("utf-8")).group(1))
setup(
name="django-extra-views",
version=VERSION,
url="https://github.com/AndrewIngram/django-extra-views",
install_requires=["Django >=1.11", "six>=1.5.2"],
description="Extra class-based views for Django",
long_description=open("README.rst", "r").read(),
license="MIT",
author="Andrew Ingram",
author_email="andy@andrewingram.net",
packages=["extra_views", "extra_views.contrib"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
)
|
1c0302c7137af550314b05f6e3cc87134c9bdc65 | flaskrst/modules/staticpages/__init__.py | flaskrst/modules/staticpages/__init__.py | # -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import Blueprint, current_app, render_template
from flaskrst.parsers import rstDocument
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages) | # -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, current_app, render_template, safe_join
from flaskrst.parsers import rstDocument
staticpages = Blueprint('staticpages', __name__, template_folder='templates')
@staticpages.route('/', defaults={'file_path': 'index'})
@staticpages.route('/<path:file_path>')
def page(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = safe_join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(staticpages) | Rename blueprint static_pages to staticpages and rename staticpages.show to staticpages.page | Rename blueprint static_pages to staticpages and rename staticpages.show to staticpages.page
| Python | bsd-3-clause | jarus/flask-rst | # -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import Blueprint, current_app, render_template
from flaskrst.parsers import rstDocument
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)Rename blueprint static_pages to staticpages and rename staticpages.show to staticpages.page | # -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, current_app, render_template, safe_join
from flaskrst.parsers import rstDocument
staticpages = Blueprint('staticpages', __name__, template_folder='templates')
@staticpages.route('/', defaults={'file_path': 'index'})
@staticpages.route('/<path:file_path>')
def page(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = safe_join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(staticpages) | <commit_before># -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import Blueprint, current_app, render_template
from flaskrst.parsers import rstDocument
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)<commit_msg>Rename blueprint static_pages to staticpages and rename staticpages.show to staticpages.page<commit_after> | # -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, current_app, render_template, safe_join
from flaskrst.parsers import rstDocument
staticpages = Blueprint('staticpages', __name__, template_folder='templates')
@staticpages.route('/', defaults={'file_path': 'index'})
@staticpages.route('/<path:file_path>')
def page(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = safe_join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(staticpages) | # -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import Blueprint, current_app, render_template
from flaskrst.parsers import rstDocument
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)Rename blueprint static_pages to staticpages and rename staticpages.show to staticpages.page# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, current_app, render_template, safe_join
from flaskrst.parsers import rstDocument
staticpages = Blueprint('staticpages', __name__, template_folder='templates')
@staticpages.route('/', defaults={'file_path': 'index'})
@staticpages.route('/<path:file_path>')
def page(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = safe_join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(staticpages) | <commit_before># -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import Blueprint, current_app, render_template
from flaskrst.parsers import rstDocument
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)<commit_msg>Rename blueprint static_pages to staticpages and rename staticpages.show to staticpages.page<commit_after># -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, current_app, render_template, safe_join
from flaskrst.parsers import rstDocument
staticpages = Blueprint('staticpages', __name__, template_folder='templates')
@staticpages.route('/', defaults={'file_path': 'index'})
@staticpages.route('/<path:file_path>')
def page(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = safe_join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(staticpages) |
eea0f026a8fed261283c081d7bc447ec480ff6e5 | tasks.py | tasks.py | #!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
| #!/usr/bin/env python3
import os
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not respect colored output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
try:
os.mkdir('cover')
except OSError:
pass
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
| Fix bug that raised error when generating coverage | Fix bug that raised error when generating coverage
| Python | mit | caleb531/ssh-wp-backup,caleb531/ssh-wp-backup | #!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
Fix bug that raised error when generating coverage | #!/usr/bin/env python3
import os
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not respect colored output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
try:
os.mkdir('cover')
except OSError:
pass
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
| <commit_before>#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
<commit_msg>Fix bug that raised error when generating coverage<commit_after> | #!/usr/bin/env python3
import os
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not respect colored output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
try:
os.mkdir('cover')
except OSError:
pass
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
| #!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
Fix bug that raised error when generating coverage#!/usr/bin/env python3
import os
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not respect colored output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
try:
os.mkdir('cover')
except OSError:
pass
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
| <commit_before>#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
<commit_msg>Fix bug that raised error when generating coverage<commit_after>#!/usr/bin/env python3
import os
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not respect colored output, unfortunately
nosetests = subprocess.Popen(['nosetests', '--rednose'])
nosetests.wait()
@task
def cover():
try:
os.mkdir('cover')
except OSError:
pass
nosetests = subprocess.Popen(['nosetests', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
|
ba115c2087b7fc5b07073ee42af6e2548d462245 | scripts/scripts/current_track.py | scripts/scripts/current_track.py | import subprocess
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
print("stopped")
if __name__ == "__main__":
main()
| import subprocess
def text_split(text):
new_text = text.split()
new_text_len = len(new_text)
if new_text_len < 2:
return new_text[0]
elif new_text_len == 2:
return text
else:
return " ".join(new_text[0:2]) + "..."
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
title = subprocess.getoutput("spotify-now -i %title -p 'paused' -e stopped")
if title == "paused" or title == "stopped":
print(title)
elif title == "":
print("empty")
else:
artist = subprocess.getoutput("spotify-now -i '%artist'")
new_title = text_split(title)
new_artist = text_split(artist)
print(new_title + ' - ' + new_artist)
if __name__ == "__main__":
main()
| Use `spotify-now` to get current song info in i3blocks | [track] Use `spotify-now` to get current song info in i3blocks
| Python | mit | iAmMrinal0/dotfiles,iAmMrinal0/dotfiles | import subprocess
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
print("stopped")
if __name__ == "__main__":
main()
[track] Use `spotify-now` to get current song info in i3blocks | import subprocess
def text_split(text):
new_text = text.split()
new_text_len = len(new_text)
if new_text_len < 2:
return new_text[0]
elif new_text_len == 2:
return text
else:
return " ".join(new_text[0:2]) + "..."
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
title = subprocess.getoutput("spotify-now -i %title -p 'paused' -e stopped")
if title == "paused" or title == "stopped":
print(title)
elif title == "":
print("empty")
else:
artist = subprocess.getoutput("spotify-now -i '%artist'")
new_title = text_split(title)
new_artist = text_split(artist)
print(new_title + ' - ' + new_artist)
if __name__ == "__main__":
main()
| <commit_before>import subprocess
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
print("stopped")
if __name__ == "__main__":
main()
<commit_msg>[track] Use `spotify-now` to get current song info in i3blocks<commit_after> | import subprocess
def text_split(text):
new_text = text.split()
new_text_len = len(new_text)
if new_text_len < 2:
return new_text[0]
elif new_text_len == 2:
return text
else:
return " ".join(new_text[0:2]) + "..."
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
title = subprocess.getoutput("spotify-now -i %title -p 'paused' -e stopped")
if title == "paused" or title == "stopped":
print(title)
elif title == "":
print("empty")
else:
artist = subprocess.getoutput("spotify-now -i '%artist'")
new_title = text_split(title)
new_artist = text_split(artist)
print(new_title + ' - ' + new_artist)
if __name__ == "__main__":
main()
| import subprocess
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
print("stopped")
if __name__ == "__main__":
main()
[track] Use `spotify-now` to get current song info in i3blocksimport subprocess
def text_split(text):
new_text = text.split()
new_text_len = len(new_text)
if new_text_len < 2:
return new_text[0]
elif new_text_len == 2:
return text
else:
return " ".join(new_text[0:2]) + "..."
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
title = subprocess.getoutput("spotify-now -i %title -p 'paused' -e stopped")
if title == "paused" or title == "stopped":
print(title)
elif title == "":
print("empty")
else:
artist = subprocess.getoutput("spotify-now -i '%artist'")
new_title = text_split(title)
new_artist = text_split(artist)
print(new_title + ' - ' + new_artist)
if __name__ == "__main__":
main()
| <commit_before>import subprocess
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
print("stopped")
if __name__ == "__main__":
main()
<commit_msg>[track] Use `spotify-now` to get current song info in i3blocks<commit_after>import subprocess
def text_split(text):
new_text = text.split()
new_text_len = len(new_text)
if new_text_len < 2:
return new_text[0]
elif new_text_len == 2:
return text
else:
return " ".join(new_text[0:2]) + "..."
def main():
st = subprocess.getoutput("mpc")
lin = st.split("\n")
if len(lin) > 1:
sn_status = lin[1]
duration = lin[1].split()
if "paused" in sn_status:
print(lin[0].split("-")[-1] + " [paused]")
elif "playing" in sn_status:
print(lin[0].split("-")[-1] + " " + duration[2])
else:
print("stopped")
else:
title = subprocess.getoutput("spotify-now -i %title -p 'paused' -e stopped")
if title == "paused" or title == "stopped":
print(title)
elif title == "":
print("empty")
else:
artist = subprocess.getoutput("spotify-now -i '%artist'")
new_title = text_split(title)
new_artist = text_split(artist)
print(new_title + ' - ' + new_artist)
if __name__ == "__main__":
main()
|
5ebd910a5665402b68e50f540d8480d8c3bd4e64 | pyflation/analysis/deltaprel.py | pyflation/analysis/deltaprel.py | ''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
| ''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
def soundspeeds(Vphi, phidot, H):
"""Sound speeds of the background fields
Arguments
---------
Vphi: array_like
First derivative of the potential with respect to the fields
phidot: array_like
First derivative of the field values with respect to efold number N.
H: array_like
The Hubble parameter
All the arguments should have the same number of dimensions. Vphi and phidot
should be arrays of the same size, but H should have a dimension of size 1
corresponding to the "field" dimension of the other variables.
"""
try:
calphasq = 1 + 2*Vphi/(3*H**2*phidot)
except ValueError:
raise ValueError("""Arrays need to have the correct shape.
Vphi and phidot should have exactly the same shape,
and H should have a dimension of size 1 corresponding
to the "field" dimension of the others.""")
return calphasq
def rhodots():
"""Derivative in e-fold time of the energy densities of the individual fields."""
pass
def fullrhodot():
"""Combined derivative in e-fold time of the energy density of the field"""
pass
def deltarhosmatrix():
"""Matrix of the first order perturbed energy densities of the field components."""
pass
def deltaprel():
"""Perturbed relative pressure of the fields given as quantum mode functions."""
pass
def deltaprelspectrum():
"""Power spectrum of the full perturbed relative pressure."""
pass | Add outlines of functions and code for soundspeeds. | Add outlines of functions and code for soundspeeds.
| Python | bsd-3-clause | ihuston/pyflation,ihuston/pyflation | ''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
Add outlines of functions and code for soundspeeds. | ''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
def soundspeeds(Vphi, phidot, H):
"""Sound speeds of the background fields
Arguments
---------
Vphi: array_like
First derivative of the potential with respect to the fields
phidot: array_like
First derivative of the field values with respect to efold number N.
H: array_like
The Hubble parameter
All the arguments should have the same number of dimensions. Vphi and phidot
should be arrays of the same size, but H should have a dimension of size 1
corresponding to the "field" dimension of the other variables.
"""
try:
calphasq = 1 + 2*Vphi/(3*H**2*phidot)
except ValueError:
raise ValueError("""Arrays need to have the correct shape.
Vphi and phidot should have exactly the same shape,
and H should have a dimension of size 1 corresponding
to the "field" dimension of the others.""")
return calphasq
def rhodots():
"""Derivative in e-fold time of the energy densities of the individual fields."""
pass
def fullrhodot():
"""Combined derivative in e-fold time of the energy density of the field"""
pass
def deltarhosmatrix():
"""Matrix of the first order perturbed energy densities of the field components."""
pass
def deltaprel():
"""Perturbed relative pressure of the fields given as quantum mode functions."""
pass
def deltaprelspectrum():
"""Power spectrum of the full perturbed relative pressure."""
pass | <commit_before>''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
<commit_msg>Add outlines of functions and code for soundspeeds.<commit_after> | ''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
def soundspeeds(Vphi, phidot, H):
"""Sound speeds of the background fields
Arguments
---------
Vphi: array_like
First derivative of the potential with respect to the fields
phidot: array_like
First derivative of the field values with respect to efold number N.
H: array_like
The Hubble parameter
All the arguments should have the same number of dimensions. Vphi and phidot
should be arrays of the same size, but H should have a dimension of size 1
corresponding to the "field" dimension of the other variables.
"""
try:
calphasq = 1 + 2*Vphi/(3*H**2*phidot)
except ValueError:
raise ValueError("""Arrays need to have the correct shape.
Vphi and phidot should have exactly the same shape,
and H should have a dimension of size 1 corresponding
to the "field" dimension of the others.""")
return calphasq
def rhodots():
"""Derivative in e-fold time of the energy densities of the individual fields."""
pass
def fullrhodot():
"""Combined derivative in e-fold time of the energy density of the field"""
pass
def deltarhosmatrix():
"""Matrix of the first order perturbed energy densities of the field components."""
pass
def deltaprel():
"""Perturbed relative pressure of the fields given as quantum mode functions."""
pass
def deltaprelspectrum():
"""Power spectrum of the full perturbed relative pressure."""
pass | ''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
Add outlines of functions and code for soundspeeds.''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
def soundspeeds(Vphi, phidot, H):
"""Sound speeds of the background fields
Arguments
---------
Vphi: array_like
First derivative of the potential with respect to the fields
phidot: array_like
First derivative of the field values with respect to efold number N.
H: array_like
The Hubble parameter
All the arguments should have the same number of dimensions. Vphi and phidot
should be arrays of the same size, but H should have a dimension of size 1
corresponding to the "field" dimension of the other variables.
"""
try:
calphasq = 1 + 2*Vphi/(3*H**2*phidot)
except ValueError:
raise ValueError("""Arrays need to have the correct shape.
Vphi and phidot should have exactly the same shape,
and H should have a dimension of size 1 corresponding
to the "field" dimension of the others.""")
return calphasq
def rhodots():
"""Derivative in e-fold time of the energy densities of the individual fields."""
pass
def fullrhodot():
"""Combined derivative in e-fold time of the energy density of the field"""
pass
def deltarhosmatrix():
"""Matrix of the first order perturbed energy densities of the field components."""
pass
def deltaprel():
"""Perturbed relative pressure of the fields given as quantum mode functions."""
pass
def deltaprelspectrum():
"""Power spectrum of the full perturbed relative pressure."""
pass | <commit_before>''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
<commit_msg>Add outlines of functions and code for soundspeeds.<commit_after>''' pyflation.analysis.deltaprel - Module to calculate relative pressure
perturbations.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
def soundspeeds(Vphi, phidot, H):
"""Sound speeds of the background fields
Arguments
---------
Vphi: array_like
First derivative of the potential with respect to the fields
phidot: array_like
First derivative of the field values with respect to efold number N.
H: array_like
The Hubble parameter
All the arguments should have the same number of dimensions. Vphi and phidot
should be arrays of the same size, but H should have a dimension of size 1
corresponding to the "field" dimension of the other variables.
"""
try:
calphasq = 1 + 2*Vphi/(3*H**2*phidot)
except ValueError:
raise ValueError("""Arrays need to have the correct shape.
Vphi and phidot should have exactly the same shape,
and H should have a dimension of size 1 corresponding
to the "field" dimension of the others.""")
return calphasq
def rhodots():
"""Derivative in e-fold time of the energy densities of the individual fields."""
pass
def fullrhodot():
"""Combined derivative in e-fold time of the energy density of the field"""
pass
def deltarhosmatrix():
"""Matrix of the first order perturbed energy densities of the field components."""
pass
def deltaprel():
"""Perturbed relative pressure of the fields given as quantum mode functions."""
pass
def deltaprelspectrum():
"""Power spectrum of the full perturbed relative pressure."""
pass |
ce9d547af419ec46c1e659a0ca630a752e59e01c | pombola/south_africa/views/constants.py | pombola/south_africa/views/constants.py | # For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 3.05
| # For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 6.05
| Increase the PMG API timeout, since it's slow at the moment | Increase the PMG API timeout, since it's slow at the moment
| Python | agpl-3.0 | mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola | # For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 3.05
Increase the PMG API timeout, since it's slow at the moment | # For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 6.05
| <commit_before># For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 3.05
<commit_msg>Increase the PMG API timeout, since it's slow at the moment<commit_after> | # For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 6.05
| # For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 3.05
Increase the PMG API timeout, since it's slow at the moment# For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 6.05
| <commit_before># For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 3.05
<commit_msg>Increase the PMG API timeout, since it's slow at the moment<commit_after># For requests to external APIs, timeout after 3 seconds:
API_REQUESTS_TIMEOUT = 6.05
|
b612d7a6d67e999f96917de642230946ccf02106 | qnd/experiment.py | qnd/experiment.py | import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of model "
"savings")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
| import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of train steps")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
| Fix help message of --min_eval_frequency flag | Fix help message of --min_eval_frequency flag
| Python | unlicense | raviqqe/tensorflow-qnd,raviqqe/tensorflow-qnd | import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of model "
"savings")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
Fix help message of --min_eval_frequency flag | import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of train steps")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
| <commit_before>import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of model "
"savings")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
<commit_msg>Fix help message of --min_eval_frequency flag<commit_after> | import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of train steps")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
| import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of model "
"savings")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
Fix help message of --min_eval_frequency flagimport tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of train steps")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
| <commit_before>import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of model "
"savings")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
<commit_msg>Fix help message of --min_eval_frequency flag<commit_after>import tensorflow as tf
from .flag import FLAGS, FlagAdder
from .estimator import def_estimator
from .inputs import def_def_train_input_fn, def_def_eval_input_fn
def def_def_experiment_fn(batch_inputs=True,
prepare_filename_queues=True,
distributed=False):
adder = FlagAdder()
for mode in [tf.contrib.learn.ModeKeys.TRAIN,
tf.contrib.learn.ModeKeys.EVAL]:
adder.add_flag("{}_steps".format(mode), type=int,
help="Maximum number of {} steps".format(mode))
adder.add_flag("min_eval_frequency", type=int, default=1,
help="Minimum evaluation frequency in number of train steps")
estimator = def_estimator(distributed)
def_train_input_fn = def_def_train_input_fn(batch_inputs,
prepare_filename_queues)
def_eval_input_fn = def_def_eval_input_fn(batch_inputs,
prepare_filename_queues)
def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None):
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
estimator(model_fn, output_dir),
def_train_input_fn(train_input_fn),
def_eval_input_fn(eval_input_fn or train_input_fn),
**adder.flags)
return experiment_fn
return def_experiment_fn
|
2a8816e07eec2cfc4680c76c1c5e080a78f149b4 | etc/bin/xcode_bot_script.py | etc/bin/xcode_bot_script.py | # This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
| # This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
# Utilizes `cavejohnson` for various integrations
# https://github.com/drewcrawford/CaveJohnson
#!/bin/bash
PATH=/Library/Frameworks/Python.framework/Versions/3.4/bin:$PATH
# GitHub
# First set a github auth token like so:
# sudo -u _xcsbuildd cavejohnson setGithubAuthToken --token auth_token_generated_by_github
# Set build status on GitHub
cavejohnson setGithubStatus
echo "Finished running Xcode Bot's Run Script Trigger"
| Add displaying integration status on GitHub | Add displaying integration status on GitHub
| Python | bsd-3-clause | apptentive/apptentive-ios,sahara108/apptentive-ios,Jawbone/apptentive-ios,hibu/apptentive-ios,ALHariPrasad/apptentive-ios,apptentive/apptentive-ios,hibu/apptentive-ios,apptentive/apptentive-ios,hibu/apptentive-ios,sahara108/apptentive-ios,Jawbone/apptentive-ios,ALHariPrasad/apptentive-ios | # This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
Add displaying integration status on GitHub | # This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
# Utilizes `cavejohnson` for various integrations
# https://github.com/drewcrawford/CaveJohnson
#!/bin/bash
PATH=/Library/Frameworks/Python.framework/Versions/3.4/bin:$PATH
# GitHub
# First set a github auth token like so:
# sudo -u _xcsbuildd cavejohnson setGithubAuthToken --token auth_token_generated_by_github
# Set build status on GitHub
cavejohnson setGithubStatus
echo "Finished running Xcode Bot's Run Script Trigger"
| <commit_before># This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
<commit_msg>Add displaying integration status on GitHub<commit_after> | # This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
# Utilizes `cavejohnson` for various integrations
# https://github.com/drewcrawford/CaveJohnson
#!/bin/bash
PATH=/Library/Frameworks/Python.framework/Versions/3.4/bin:$PATH
# GitHub
# First set a github auth token like so:
# sudo -u _xcsbuildd cavejohnson setGithubAuthToken --token auth_token_generated_by_github
# Set build status on GitHub
cavejohnson setGithubStatus
echo "Finished running Xcode Bot's Run Script Trigger"
| # This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
Add displaying integration status on GitHub# This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
# Utilizes `cavejohnson` for various integrations
# https://github.com/drewcrawford/CaveJohnson
#!/bin/bash
PATH=/Library/Frameworks/Python.framework/Versions/3.4/bin:$PATH
# GitHub
# First set a github auth token like so:
# sudo -u _xcsbuildd cavejohnson setGithubAuthToken --token auth_token_generated_by_github
# Set build status on GitHub
cavejohnson setGithubStatus
echo "Finished running Xcode Bot's Run Script Trigger"
| <commit_before># This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
<commit_msg>Add displaying integration status on GitHub<commit_after># This script should be copied into the Run Script trigger of an Xcode Bot
# `Xcode Bot > Edit Bot > Triggers > After Integration > Run Script`
# Utilizes `cavejohnson` for various integrations
# https://github.com/drewcrawford/CaveJohnson
#!/bin/bash
PATH=/Library/Frameworks/Python.framework/Versions/3.4/bin:$PATH
# GitHub
# First set a github auth token like so:
# sudo -u _xcsbuildd cavejohnson setGithubAuthToken --token auth_token_generated_by_github
# Set build status on GitHub
cavejohnson setGithubStatus
echo "Finished running Xcode Bot's Run Script Trigger"
|
e2aa41bb84984fea4c6b8ea475caf7f7af051dd9 | gaphor/codegen/codegen.py | gaphor/codegen/codegen.py | #!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides, True)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
| #!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(
args.modelfile, args.outfile, args.overrides, includes_sysml=True
)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
| Use positional argument to improve clarity | Use positional argument to improve clarity
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
| Python | lgpl-2.1 | amolenaar/gaphor,amolenaar/gaphor | #!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides, True)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
Use positional argument to improve clarity
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me> | #!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(
args.modelfile, args.outfile, args.overrides, includes_sysml=True
)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides, True)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
<commit_msg>Use positional argument to improve clarity
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after> | #!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(
args.modelfile, args.outfile, args.overrides, includes_sysml=True
)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
| #!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides, True)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
Use positional argument to improve clarity
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>#!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(
args.modelfile, args.outfile, args.overrides, includes_sysml=True
)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides, True)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
<commit_msg>Use positional argument to improve clarity
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>#!/usr/bin/env python
"""The Gaphor code generator CLI.
Provides the CLI for the code generator which transforms a Gaphor models
(with .gaphor file extension) in to a data model in Python.
"""
import argparse
from distutils.util import byte_compile
from pathlib import Path
from gaphor.codegen import profile_coder, uml_coder
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("modelfile", type=Path, help="gaphor model filename")
parser.add_argument("outfile", type=Path, help="python data model filename")
parser.add_argument("overrides", type=Path, help="override filename")
parser.add_argument(
"--uml_profile", help="generate a UML profile", action="store_true"
)
parser.add_argument(
"--sysml_profile", help="generate a SysML profile", action="store_true"
)
args = parser.parse_args()
print(f"Generating {args.outfile} from {args.modelfile}...")
print(" (warnings can be ignored)")
if args.uml_profile:
profile_coder.generate(args.modelfile, args.outfile, args.overrides)
elif args.sysml_profile:
profile_coder.generate(
args.modelfile, args.outfile, args.overrides, includes_sysml=True
)
else:
uml_coder.generate(args.modelfile, args.outfile, args.overrides)
byte_compile([str(args.outfile)])
if __name__ == "__main__":
main()
|
f5d00ed283da255b8cd2c82b36e19ab9504a7dd4 | webmanager/management/commands/create_default_super_user.py | webmanager/management/commands/create_default_super_user.py | from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
create_admin(super_username,
super_password, "r@j.cn")
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin() | from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "r@j.cn")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin() | Check before creating default super user. | Check before creating default super user.
| Python | bsd-3-clause | weijia/webmanager,weijia/webmanager,weijia/webmanager | from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
create_admin(super_username,
super_password, "r@j.cn")
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()Check before creating default super user. | from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "r@j.cn")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin() | <commit_before>from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
create_admin(super_username,
super_password, "r@j.cn")
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()<commit_msg>Check before creating default super user.<commit_after> | from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "r@j.cn")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin() | from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
create_admin(super_username,
super_password, "r@j.cn")
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()Check before creating default super user.from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "r@j.cn")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin() | <commit_before>from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
create_admin(super_username,
super_password, "r@j.cn")
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()<commit_msg>Check before creating default super user.<commit_after>from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "r@j.cn")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin() |
106d56e734140d006a083965e55560a55e21e428 | NGrams.py | NGrams.py | def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
for i in range(0, len(text_array) - n + 1):
print text_array[i:i + n]
generate_ngrams('this is a random piece', 2)
generate_ngrams('this is a random piece', 3)
| def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
ngram_list = []
for i in range(0, len(text_array) - n + 1):
ngram_list.append(text_array[i:i + n])
return ngram_list
print generate_ngrams('this is a random piece', 2)
print generate_ngrams('this is a random piece', 3)
print generate_ngrams('this is a random piece', 4)
| Return a list of lists | Return a list of lists
| Python | bsd-2-clause | ambidextrousTx/RNLTK | def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
for i in range(0, len(text_array) - n + 1):
print text_array[i:i + n]
generate_ngrams('this is a random piece', 2)
generate_ngrams('this is a random piece', 3)
Return a list of lists | def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
ngram_list = []
for i in range(0, len(text_array) - n + 1):
ngram_list.append(text_array[i:i + n])
return ngram_list
print generate_ngrams('this is a random piece', 2)
print generate_ngrams('this is a random piece', 3)
print generate_ngrams('this is a random piece', 4)
| <commit_before>def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
for i in range(0, len(text_array) - n + 1):
print text_array[i:i + n]
generate_ngrams('this is a random piece', 2)
generate_ngrams('this is a random piece', 3)
<commit_msg>Return a list of lists<commit_after> | def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
ngram_list = []
for i in range(0, len(text_array) - n + 1):
ngram_list.append(text_array[i:i + n])
return ngram_list
print generate_ngrams('this is a random piece', 2)
print generate_ngrams('this is a random piece', 3)
print generate_ngrams('this is a random piece', 4)
| def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
for i in range(0, len(text_array) - n + 1):
print text_array[i:i + n]
generate_ngrams('this is a random piece', 2)
generate_ngrams('this is a random piece', 3)
Return a list of listsdef generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
ngram_list = []
for i in range(0, len(text_array) - n + 1):
ngram_list.append(text_array[i:i + n])
return ngram_list
print generate_ngrams('this is a random piece', 2)
print generate_ngrams('this is a random piece', 3)
print generate_ngrams('this is a random piece', 4)
| <commit_before>def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
for i in range(0, len(text_array) - n + 1):
print text_array[i:i + n]
generate_ngrams('this is a random piece', 2)
generate_ngrams('this is a random piece', 3)
<commit_msg>Return a list of lists<commit_after>def generate_ngrams(text, n):
''' Generates all possible n-grams of a
piece of text
>>> text = 'this is a random piece'
>>> n = 2
>>> generate_ngrams(text, n)
this is
is a
a random
random piece
'''
text_array = text.split(' ')
ngram_list = []
for i in range(0, len(text_array) - n + 1):
ngram_list.append(text_array[i:i + n])
return ngram_list
print generate_ngrams('this is a random piece', 2)
print generate_ngrams('this is a random piece', 3)
print generate_ngrams('this is a random piece', 4)
|
1157fb15f938aae8cfc10392fe816d691c3b41e7 | todoist/managers/generic.py | todoist/managers/generic.py | # -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
| # -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local and self.object_type is not None:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
| Use the remote getter call only on objects with an object_type. | Use the remote getter call only on objects with an object_type.
| Python | mit | Doist/todoist-python | # -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
Use the remote getter call only on objects with an object_type. | # -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local and self.object_type is not None:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
| <commit_before># -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
<commit_msg>Use the remote getter call only on objects with an object_type.<commit_after> | # -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local and self.object_type is not None:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
| # -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
Use the remote getter call only on objects with an object_type.# -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local and self.object_type is not None:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
| <commit_before># -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
<commit_msg>Use the remote getter call only on objects with an object_type.<commit_after># -*- coding: utf-8 -*-
class Manager(object):
# should be re-defined in a subclass
state_name = None
object_type = None
def __init__(self, api):
self.api = api
# shortcuts
@property
def state(self):
return self.api.state
@property
def queue(self):
return self.api.queue
class AllMixin(object):
def all(self, filt=None):
return list(filter(filt, self.state[self.state_name]))
class GetByIdMixin(object):
def get_by_id(self, obj_id, only_local=False):
"""
Finds and returns the object based on its id.
"""
for obj in self.state[self.state_name]:
if obj['id'] == obj_id or obj.temp_id == str(obj_id):
return obj
if not only_local and self.object_type is not None:
getter = getattr(self.api, 'get_%s' % self.object_type)
return getter(obj_id)
return None
class SyncMixin(object):
"""
Syncs this specific type of objects.
"""
def sync(self):
return self.api.sync()
|
aaaa20be61e96daf61e397fdf54dfaf6bec461e8 | falcom/api/worldcat/data.py | falcom/api/worldcat/data.py | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
@property
def title (self):
return self.get("title")
def __iter__ (self):
return iter(self.get("libraries", ()))
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
auto_properties = ("title",)
def __iter__ (self):
return iter(self.get("libraries", ()))
| Use new property format for WorldcatData | Use new property format for WorldcatData
| Python | bsd-3-clause | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
@property
def title (self):
return self.get("title")
def __iter__ (self):
return iter(self.get("libraries", ()))
Use new property format for WorldcatData | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
auto_properties = ("title",)
def __iter__ (self):
return iter(self.get("libraries", ()))
| <commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
@property
def title (self):
return self.get("title")
def __iter__ (self):
return iter(self.get("libraries", ()))
<commit_msg>Use new property format for WorldcatData<commit_after> | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
auto_properties = ("title",)
def __iter__ (self):
return iter(self.get("libraries", ()))
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
@property
def title (self):
return self.get("title")
def __iter__ (self):
return iter(self.get("libraries", ()))
Use new property format for WorldcatData# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
auto_properties = ("title",)
def __iter__ (self):
return iter(self.get("libraries", ()))
| <commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
@property
def title (self):
return self.get("title")
def __iter__ (self):
return iter(self.get("libraries", ()))
<commit_msg>Use new property format for WorldcatData<commit_after># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from ..common import ReadOnlyDataStructure
class WorldcatData (ReadOnlyDataStructure):
auto_properties = ("title",)
def __iter__ (self):
return iter(self.get("libraries", ()))
|
0ff547915fc9de3d5edb80cc31a0f561453f3687 | salt/returners/syslog_return.py | salt/returners/syslog_return.py | '''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
def __virtual__():
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
| '''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
def __virtual__():
if not HAS_SYSLOG:
return False
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
| Check for syslog. Doesn't exist on Windows | Check for syslog. Doesn't exist on Windows
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | '''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
def __virtual__():
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
Check for syslog. Doesn't exist on Windows | '''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
def __virtual__():
if not HAS_SYSLOG:
return False
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
| <commit_before>'''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
def __virtual__():
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
<commit_msg>Check for syslog. Doesn't exist on Windows<commit_after> | '''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
def __virtual__():
if not HAS_SYSLOG:
return False
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
| '''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
def __virtual__():
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
Check for syslog. Doesn't exist on Windows'''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
def __virtual__():
if not HAS_SYSLOG:
return False
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
| <commit_before>'''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
def __virtual__():
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
<commit_msg>Check for syslog. Doesn't exist on Windows<commit_after>'''
Return data to the host operating system's syslog facility
Required python modules: syslog, json
The syslog returner simply reuses the operating system's syslog
facility to log return data
'''
# Import python libs
import syslog
import json
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
def __virtual__():
if not HAS_SYSLOG:
return False
return 'syslog'
def returner(ret):
'''
Return data to the local syslog
'''
syslog.syslog(syslog.LOG_INFO, 'salt-minion: {0}'.format(json.dumps(ret)))
|
791d378d1c5cb2e9729877bc70261b9354bdb590 | testsuite/cases/pillow_rotate_right.py | testsuite/cases/pillow_rotate_right.py | # coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
im.transpose(self.transposition)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
]
if hasattr(Image, 'TRANSPOSE'):
cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
| # coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
| Transpose and Transpose180 for all Pillow versions | Transpose and Transpose180 for all Pillow versions
| Python | mit | python-pillow/pillow-perf,python-pillow/pillow-perf | # coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
im.transpose(self.transposition)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
]
if hasattr(Image, 'TRANSPOSE'):
cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
Transpose and Transpose180 for all Pillow versions | # coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
| <commit_before># coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
im.transpose(self.transposition)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
]
if hasattr(Image, 'TRANSPOSE'):
cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
<commit_msg>Transpose and Transpose180 for all Pillow versions<commit_after> | # coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
| # coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
im.transpose(self.transposition)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
]
if hasattr(Image, 'TRANSPOSE'):
cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
Transpose and Transpose180 for all Pillow versions# coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
| <commit_before># coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
im.transpose(self.transposition)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', Image.FLIP_LEFT_RIGHT),
rpartial(RotateRightCase, 'Flip', Image.FLIP_TOP_BOTTOM),
rpartial(RotateRightCase, 'Rotate 90', Image.ROTATE_90),
rpartial(RotateRightCase, 'Rotate 180', Image.ROTATE_180),
rpartial(RotateRightCase, 'Rotate 270', Image.ROTATE_270),
]
if hasattr(Image, 'TRANSPOSE'):
cases.append(rpartial(RotateRightCase, 'Transpose', Image.TRANSPOSE))
<commit_msg>Transpose and Transpose180 for all Pillow versions<commit_after># coding: utf-8
from __future__ import print_function, unicode_literals, absolute_import
from PIL import Image
from .base import rpartial
from .pillow import PillowTestCase
class RotateRightCase(PillowTestCase):
def handle_args(self, name, transposition):
self.name = name
self.transposition = transposition
def runner(self, im):
for trans in self.transposition:
im = im.transpose(trans)
def readable_args(self):
return [self.name]
cases = [
rpartial(RotateRightCase, 'Flop', [Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Flip', [Image.FLIP_TOP_BOTTOM]),
rpartial(RotateRightCase, 'Rotate 90', [Image.ROTATE_90]),
rpartial(RotateRightCase, 'Rotate 180', [Image.ROTATE_180]),
rpartial(RotateRightCase, 'Rotate 270', [Image.ROTATE_270]),
rpartial(RotateRightCase, 'Transpose',
[Image.TRANSPOSE]
if hasattr(Image, 'TRANSPOSE')
else [Image.ROTATE_90, Image.FLIP_LEFT_RIGHT]),
rpartial(RotateRightCase, 'Transpose180',
[Image.TRANSPOSE_ROTATE_180]
if hasattr(Image, 'TRANSPOSE_ROTATE_180')
else [Image.ROTATE_270, Image.FLIP_LEFT_RIGHT]),
]
|
20df58bb9e605ecc53848ade31a3acb98118f00b | scripts/extract_clips_from_hdf5_file.py | scripts/extract_clips_from_hdf5_file.py | from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, sample_rate = read_clip(clip_group, clip_id)
print(clip_id, len(samples), samples.dtype, sample_rate)
write_wave_file(clip_id, samples, sample_rate)
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
sample_rate = clip.attrs['sample_rate']
return samples, sample_rate
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
| from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, attributes = read_clip(clip_group, clip_id)
show_clip(clip_id, samples, attributes)
write_wave_file(clip_id, samples, attributes['sample_rate'])
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
attributes = dict((name, value) for name, value in clip.attrs.items())
return samples, attributes
def show_clip(clip_id, samples, attributes):
print(f'clip {clip_id}:')
print(f' length: {len(samples)}')
print(' attributes:')
for key in sorted(attributes.keys()):
value = attributes[key]
print(f' {key}: {value}')
print()
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
| Add attribute display to clip extraction script. | Add attribute display to clip extraction script.
| Python | mit | HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper | from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, sample_rate = read_clip(clip_group, clip_id)
print(clip_id, len(samples), samples.dtype, sample_rate)
write_wave_file(clip_id, samples, sample_rate)
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
sample_rate = clip.attrs['sample_rate']
return samples, sample_rate
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
Add attribute display to clip extraction script. | from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, attributes = read_clip(clip_group, clip_id)
show_clip(clip_id, samples, attributes)
write_wave_file(clip_id, samples, attributes['sample_rate'])
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
attributes = dict((name, value) for name, value in clip.attrs.items())
return samples, attributes
def show_clip(clip_id, samples, attributes):
print(f'clip {clip_id}:')
print(f' length: {len(samples)}')
print(' attributes:')
for key in sorted(attributes.keys()):
value = attributes[key]
print(f' {key}: {value}')
print()
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
| <commit_before>from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, sample_rate = read_clip(clip_group, clip_id)
print(clip_id, len(samples), samples.dtype, sample_rate)
write_wave_file(clip_id, samples, sample_rate)
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
sample_rate = clip.attrs['sample_rate']
return samples, sample_rate
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
<commit_msg>Add attribute display to clip extraction script.<commit_after> | from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, attributes = read_clip(clip_group, clip_id)
show_clip(clip_id, samples, attributes)
write_wave_file(clip_id, samples, attributes['sample_rate'])
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
attributes = dict((name, value) for name, value in clip.attrs.items())
return samples, attributes
def show_clip(clip_id, samples, attributes):
print(f'clip {clip_id}:')
print(f' length: {len(samples)}')
print(' attributes:')
for key in sorted(attributes.keys()):
value = attributes[key]
print(f' {key}: {value}')
print()
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
| from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, sample_rate = read_clip(clip_group, clip_id)
print(clip_id, len(samples), samples.dtype, sample_rate)
write_wave_file(clip_id, samples, sample_rate)
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
sample_rate = clip.attrs['sample_rate']
return samples, sample_rate
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
Add attribute display to clip extraction script.from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, attributes = read_clip(clip_group, clip_id)
show_clip(clip_id, samples, attributes)
write_wave_file(clip_id, samples, attributes['sample_rate'])
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
attributes = dict((name, value) for name, value in clip.attrs.items())
return samples, attributes
def show_clip(clip_id, samples, attributes):
print(f'clip {clip_id}:')
print(f' length: {len(samples)}')
print(' attributes:')
for key in sorted(attributes.keys()):
value = attributes[key]
print(f' {key}: {value}')
print()
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
| <commit_before>from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, sample_rate = read_clip(clip_group, clip_id)
print(clip_id, len(samples), samples.dtype, sample_rate)
write_wave_file(clip_id, samples, sample_rate)
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
sample_rate = clip.attrs['sample_rate']
return samples, sample_rate
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
<commit_msg>Add attribute display to clip extraction script.<commit_after>from pathlib import Path
import wave
import h5py
DIR_PATH = Path('/Users/harold/Desktop/Clips')
INPUT_FILE_PATH = DIR_PATH / 'Clips.h5'
CLIP_COUNT = 5
def main():
with h5py.File(INPUT_FILE_PATH, 'r') as file_:
clip_group = file_['clips']
for i, clip_id in enumerate(clip_group):
if i == CLIP_COUNT:
break
samples, attributes = read_clip(clip_group, clip_id)
show_clip(clip_id, samples, attributes)
write_wave_file(clip_id, samples, attributes['sample_rate'])
def read_clip(clip_group, clip_id):
clip = clip_group[clip_id]
samples = clip[:]
attributes = dict((name, value) for name, value in clip.attrs.items())
return samples, attributes
def show_clip(clip_id, samples, attributes):
print(f'clip {clip_id}:')
print(f' length: {len(samples)}')
print(' attributes:')
for key in sorted(attributes.keys()):
value = attributes[key]
print(f' {key}: {value}')
print()
def write_wave_file(i, samples, sample_rate):
file_name = f'{i}.wav'
file_path = DIR_PATH / file_name
with wave.open(str(file_path), 'wb') as file_:
file_.setparams((1, 2, sample_rate, len(samples), 'NONE', ''))
file_.writeframes(samples.tobytes())
if __name__ == '__main__':
main()
|
73856ac73abd9dc68909a67077c016d003888cdd | credentials/apps/records/migrations/0006_auto_20180718_1256.py | credentials/apps/records/migrations/0006_auto_20180718_1256.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
pcr.program = Program.objects.get(uuid=pcr.certificate.program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
program_uuid = pcr.certificate.program_uuid
site = pcr.certificate.site
pcr.program = Program.objects.get(site=site, uuid=program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
| Add site guarding for ProgramCertRecord data migration | Add site guarding for ProgramCertRecord data migration
| Python | agpl-3.0 | edx/credentials,edx/credentials,edx/credentials,edx/credentials | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
pcr.program = Program.objects.get(uuid=pcr.certificate.program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
Add site guarding for ProgramCertRecord data migration | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
program_uuid = pcr.certificate.program_uuid
site = pcr.certificate.site
pcr.program = Program.objects.get(site=site, uuid=program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
pcr.program = Program.objects.get(uuid=pcr.certificate.program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
<commit_msg>Add site guarding for ProgramCertRecord data migration<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
program_uuid = pcr.certificate.program_uuid
site = pcr.certificate.site
pcr.program = Program.objects.get(site=site, uuid=program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
pcr.program = Program.objects.get(uuid=pcr.certificate.program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
Add site guarding for ProgramCertRecord data migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
program_uuid = pcr.certificate.program_uuid
site = pcr.certificate.site
pcr.program = Program.objects.get(site=site, uuid=program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
pcr.program = Program.objects.get(uuid=pcr.certificate.program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
<commit_msg>Add site guarding for ProgramCertRecord data migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 20:02
from __future__ import unicode_literals
from django.db import migrations
from credentials.apps.catalog.models import Program
from credentials.apps.records.models import ProgramCertRecord
def seed_program_cert_records(apps, schema_editor):
for pcr in ProgramCertRecord.objects.all():
program_uuid = pcr.certificate.program_uuid
site = pcr.certificate.site
pcr.program = Program.objects.get(site=site, uuid=program_uuid)
pcr.save()
class Migration(migrations.Migration):
dependencies = [
('records', '0005_auto_20180717_1953'),
]
operations = [
migrations.RunPython(seed_program_cert_records),
]
|
b50c72ad6200cc9f96e1b9eda03fba5d2d4999b9 | vesper/external_urls.py | vesper/external_urls.py | """
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = True
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
| """
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = False
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
| Change doc setting for release. | Change doc setting for release. | Python | mit | HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper | """
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = True
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
Change doc setting for release. | """
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = False
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
| <commit_before>"""
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = True
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
<commit_msg>Change doc setting for release.<commit_after> | """
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = False
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
| """
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = True
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
Change doc setting for release."""
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = False
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
| <commit_before>"""
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = True
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
<commit_msg>Change doc setting for release.<commit_after>"""
Functions that return external URLs, for example for the Vesper documentation.
"""
import vesper.version as vesper_version
_USE_LATEST_DOCUMENTATION_VERSION = False
"""Set this `True` during development, `False` for release."""
def _create_documentation_url():
if _USE_LATEST_DOCUMENTATION_VERSION:
doc_version = 'latest'
else:
doc_version = vesper_version.full_version
return 'https://vesper.readthedocs.io/en/' + doc_version + '/'
def _create_tutorial_url():
return _create_documentation_url() + 'tutorial.html'
documentation_url = _create_documentation_url()
tutorial_url = _create_tutorial_url()
source_code_url = 'https://github.com/HaroldMills/Vesper'
|
b0e101f523fd853392e65b1b30204a56e3ec34ec | tests/test_twitter.py | tests/test_twitter.py | # -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import CONSUMER_KEY, CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
| # -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
| Update access token variable names | Update access token variable names
| Python | mit | nestauk/inet | # -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import CONSUMER_KEY, CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
Update access token variable names | # -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
| <commit_before># -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import CONSUMER_KEY, CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
<commit_msg>Update access token variable names<commit_after> | # -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
| # -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import CONSUMER_KEY, CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
Update access token variable names# -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
| <commit_before># -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import CONSUMER_KEY, CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
<commit_msg>Update access token variable names<commit_after># -*- coding: utf-8 -*-
import pytest
import tweepy
import vcr
from secrets import TWITTER_ACCESS, TWITTER_SECRET
from secrets import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
class TestTweepyIntegration():
"""Test class to ensure tweepy functionality works as expected"""
# Class level client to use across tests
auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
@vcr.use_cassette('fixtures/vcr_cassettes/twitter.yaml')
def test_authd(self):
api = tweepy.API(self.auth)
assert api.verify_credentials() is not False
if __name__ == '__main__':
pytest.main()
|
d192bbc2f4e0d9d34c10b559a1007ebefd0ae7bc | kevin/playground/read.py | kevin/playground/read.py | """Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs > 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
| """Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs >= 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
| Fix bug with input number validation | Fix bug with input number validation
- Want at least 3 inputs
- Was forcing at least 4
- Change greater than to greater than or equal to
| Python | mit | kalyons11/kevin,kalyons11/kevin | """Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs > 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
Fix bug with input number validation
- Want at least 3 inputs
- Was forcing at least 4
- Change greater than to greater than or equal to | """Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs >= 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
| <commit_before>"""Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs > 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
<commit_msg>Fix bug with input number validation
- Want at least 3 inputs
- Was forcing at least 4
- Change greater than to greater than or equal to<commit_after> | """Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs >= 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
| """Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs > 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
Fix bug with input number validation
- Want at least 3 inputs
- Was forcing at least 4
- Change greater than to greater than or equal to"""Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs >= 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
| <commit_before>"""Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs > 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
<commit_msg>Fix bug with input number validation
- Want at least 3 inputs
- Was forcing at least 4
- Change greater than to greater than or equal to<commit_after>"""Quick script to read inputs.
"""
if __name__ == '__main__':
# Read the number of inputs
num_inputs = int(input("How many inputs? "))
assert num_inputs >= 3, "At least 3 please."
print("Enter your {} inputs in the following form: inp1 inp2 ... inp{}".format(
num_inputs, num_inputs))
a = list(map(int, input().split()))
assert(len(a) == num_inputs)
print(a)
|
6cb215211bff754f531126ac44df03e761b3d7fc | pagerduty_events_api/tests/test_pagerduty_incident.py | pagerduty_events_api/tests/test_pagerduty_incident.py | from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_acknowledge_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.acknowledge()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'acknowledge',
'incident_key': 'my_incident_key'})
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_resolve_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.resolve()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'resolve',
'incident_key': 'my_incident_key'})
| from ddt import ddt, data, unpack
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
@ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': action,
'incident_key': 'my_incident_key'})
| Use data provider in PD incident tests. | Use data provider in PD incident tests.
| Python | mit | BlasiusVonSzerencsi/pagerduty-events-api | from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_acknowledge_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.acknowledge()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'acknowledge',
'incident_key': 'my_incident_key'})
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_resolve_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.resolve()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'resolve',
'incident_key': 'my_incident_key'})
Use data provider in PD incident tests. | from ddt import ddt, data, unpack
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
@ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': action,
'incident_key': 'my_incident_key'})
| <commit_before>from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_acknowledge_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.acknowledge()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'acknowledge',
'incident_key': 'my_incident_key'})
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_resolve_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.resolve()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'resolve',
'incident_key': 'my_incident_key'})
<commit_msg>Use data provider in PD incident tests.<commit_after> | from ddt import ddt, data, unpack
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
@ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': action,
'incident_key': 'my_incident_key'})
| from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_acknowledge_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.acknowledge()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'acknowledge',
'incident_key': 'my_incident_key'})
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_resolve_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.resolve()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'resolve',
'incident_key': 'my_incident_key'})
Use data provider in PD incident tests.from ddt import ddt, data, unpack
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
@ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': action,
'incident_key': 'my_incident_key'})
| <commit_before>from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_acknowledge_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.acknowledge()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'acknowledge',
'incident_key': 'my_incident_key'})
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_resolve_should_make_pagerduty_api_call(self, post):
post.return_value = {}
self.__subject.resolve()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': 'resolve',
'incident_key': 'my_incident_key'})
<commit_msg>Use data provider in PD incident tests.<commit_after>from ddt import ddt, data, unpack
from unittest import TestCase
from unittest.mock import patch
from pagerduty_events_api import PagerdutyIncident
@ddt
class TestPagerdutyIncident(TestCase):
def setUp(self):
super().setUp()
self.__subject = PagerdutyIncident('my_service_key', 'my_incident_key')
def test_get_service_key_should_return_the_service_key(self):
self.assertEqual('my_service_key', self.__subject.get_service_key())
def test_get_incident_key_should_return_the_incident_key(self):
self.assertEqual('my_incident_key', self.__subject.get_incident_key())
@data('resolve', 'acknowledge')
@patch('pagerduty_events_api.pagerduty_rest_client.PagerdutyRestClient.post')
def test_should_make_appropriate_pagerduty_api_calls(self, action, post):
post.return_value = {}
getattr(self.__subject, action)()
post.assert_called_once_with({'service_key': 'my_service_key',
'event_type': action,
'incident_key': 'my_incident_key'})
|
8bdf971c3ddbe6f106e788b5a2effebad6c30ec5 | geotrek/settings/env_dev.py | geotrek/settings/env_dev.py | #
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
| #
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
'drf_yasg',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
| Add drf_yasg module for dev env | Add drf_yasg module for dev env
| Python | bsd-2-clause | makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin | #
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
Add drf_yasg module for dev env | #
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
'drf_yasg',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
| <commit_before>#
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
<commit_msg>Add drf_yasg module for dev env<commit_after> | #
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
'drf_yasg',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
| #
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
Add drf_yasg module for dev env#
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
'drf_yasg',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
| <commit_before>#
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
<commit_msg>Add drf_yasg module for dev env<commit_after>#
# Django Development
# ..........................
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'django_extensions',
'debug_toolbar',
'drf_yasg',
) + INSTALLED_APPS
INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})()
ALLOWED_HOSTS = ['*']
MIDDLEWARE += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#
# Use some default tiles
# ..........................
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
|
dbfe5fcb87762d68580756d6466bc61fa8ab4a56 | histomicstk/preprocessing/color_deconvolution/utils.py | histomicstk/preprocessing/color_deconvolution/utils.py | import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1, 2, 3.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
__all__ = (
'get_stain_vector',
)
| import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
| Enhance get_stain_matrix to take any desired number of vectors | Enhance get_stain_matrix to take any desired number of vectors
| Python | apache-2.0 | DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK | import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1, 2, 3.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
__all__ = (
'get_stain_vector',
)
Enhance get_stain_matrix to take any desired number of vectors | import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
| <commit_before>import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1, 2, 3.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
__all__ = (
'get_stain_vector',
)
<commit_msg>Enhance get_stain_matrix to take any desired number of vectors<commit_after> | import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
| import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1, 2, 3.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
__all__ = (
'get_stain_vector',
)
Enhance get_stain_matrix to take any desired number of vectorsimport numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
| <commit_before>import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1, 2, 3.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i) for i in 1, 2, 3]).T
__all__ = (
'get_stain_vector',
)
<commit_msg>Enhance get_stain_matrix to take any desired number of vectors<commit_after>import numpy
from .stain_color_map import stain_color_map
def get_stain_vector(args, index):
"""Get the stain corresponding to args.stain_$index and
args.stain_$index_vector. If the former is not "custom", the
latter must be None.
"""
args = vars(args)
stain = args['stain_' + str(index)]
stain_vector = args['stain_' + str(index) + '_vector']
if stain == 'custom':
if stain_vector is None:
raise ValueError('If "custom" is chosen for a stain, '
'a stain vector must be provided.')
return stain_vector
else:
if stain_vector is None:
return stain_color_map[stain]
raise ValueError('Unless "custom" is chosen for a stain, '
'no stain vector may be provided.')
def get_stain_matrix(args, count=3):
"""Get the stain matrix corresponding to the args.stain_$index and
args.stain_$index_vector arguments for values of index 1 to count.
Return a numpy array of column vectors.
"""
return numpy.array([get_stain_vector(args, i+1) for i in range(count)]).T
__all__ = (
'get_stain_vector',
)
|
05094307c2f49b7a6207ddaa049ac79e759c03da | lily/users/authentication/social_auth/providers/google.py | lily/users/authentication/social_auth/providers/google.py | from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'https://www.googleapis.com/auth/plus.me',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
| from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'openid',
'email',
'profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
| Fix deprecation of social OAuth scopes | LILY-3349: Fix deprecation of social OAuth scopes
| Python | agpl-3.0 | HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily | from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'https://www.googleapis.com/auth/plus.me',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
LILY-3349: Fix deprecation of social OAuth scopes | from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'openid',
'email',
'profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
| <commit_before>from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'https://www.googleapis.com/auth/plus.me',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
<commit_msg>LILY-3349: Fix deprecation of social OAuth scopes<commit_after> | from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'openid',
'email',
'profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
| from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'https://www.googleapis.com/auth/plus.me',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
LILY-3349: Fix deprecation of social OAuth scopesfrom django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'openid',
'email',
'profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
| <commit_before>from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'https://www.googleapis.com/auth/plus.me',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
<commit_msg>LILY-3349: Fix deprecation of social OAuth scopes<commit_after>from django.conf import settings
from ..exceptions import InvalidProfileError
from .base import BaseAuthProvider
class GoogleAuthProvider(BaseAuthProvider):
client_id = settings.SOCIAL_AUTH_GOOGLE_CLIENT_ID
client_secret = settings.SOCIAL_AUTH_GOOGLE_SECRET
scope = [
'openid',
'email',
'profile'
]
auth_uri = 'https://accounts.google.com/o/oauth2/v2/auth'
token_uri = 'https://www.googleapis.com/oauth2/v4/token'
jwks_uri = 'https://www.googleapis.com/oauth2/v3/certs'
def parse_profile(self, session, token):
id_token = token['id_token']
email = id_token.get('email', '')
if not email or not id_token.get('email_verified', False):
raise InvalidProfileError()
picture = self.get_picture(session=session, url=id_token.get('picture', ''))
language = self.get_language(id_token.get('locale', ''))
return {
'email': email,
'picture': picture,
'first_name': id_token.get('given_name', ''),
'last_name': id_token.get('family_name', ''),
'language': language,
}
|
6ec656a4ab0a255bad85c3157a045849da001352 | ggplot/utils/date_breaks.py | ggplot/utils/date_breaks.py | from matplotlib.dates import DayLocator, WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
| from matplotlib.dates import MinuteLocator, HourLocator, DayLocator
from matplotlib.dates import WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'minute': MinuteLocator,
'hour': HourLocator,
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [minute, hour, day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
| Add more granular date locators | Add more granular date locators
| Python | mit | has2k1/plotnine,has2k1/plotnine | from matplotlib.dates import DayLocator, WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
Add more granular date locators | from matplotlib.dates import MinuteLocator, HourLocator, DayLocator
from matplotlib.dates import WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'minute': MinuteLocator,
'hour': HourLocator,
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [minute, hour, day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
| <commit_before>from matplotlib.dates import DayLocator, WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
<commit_msg>Add more granular date locators<commit_after> | from matplotlib.dates import MinuteLocator, HourLocator, DayLocator
from matplotlib.dates import WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'minute': MinuteLocator,
'hour': HourLocator,
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [minute, hour, day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
| from matplotlib.dates import DayLocator, WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
Add more granular date locatorsfrom matplotlib.dates import MinuteLocator, HourLocator, DayLocator
from matplotlib.dates import WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'minute': MinuteLocator,
'hour': HourLocator,
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [minute, hour, day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
| <commit_before>from matplotlib.dates import DayLocator, WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
<commit_msg>Add more granular date locators<commit_after>from matplotlib.dates import MinuteLocator, HourLocator, DayLocator
from matplotlib.dates import WeekdayLocator, MonthLocator, YearLocator
def parse_break_str(txt):
"parses '10 weeks' into tuple (10, week)."
txt = txt.strip()
if len(txt.split()) == 2:
n, units = txt.split()
else:
n,units = 1, txt
units = units.rstrip('s') # e.g. weeks => week
n = int(n)
return n, units
# matplotlib's YearLocator uses different named
# arguments than the others
LOCATORS = {
'minute': MinuteLocator,
'hour': HourLocator,
'day': DayLocator,
'week': WeekdayLocator,
'month': MonthLocator,
'year': lambda interval: YearLocator(base=interval)
}
def date_breaks(width):
"""
"Regularly spaced dates."
width:
an interval specification. must be one of [minute, hour, day, week, month, year]
usage:
date_breaks(width = '1 year')
date_breaks(width = '6 weeks')
date_breaks('months')
"""
period, units = parse_break_str(width)
Locator = LOCATORS.get(units)
locator = Locator(interval=period)
return locator
|
ef62cec8673f255dd9ce909d23a877ba93bd6bf5 | voidpp_tools/json_config.py | voidpp_tools/json_config.py |
import os
import json
class JSONConfigLoader():
def __init__(self):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(__file__)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
for source in self.sources:
file_path = os.path.join(source, filename)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
return None
|
import os
import json
class JSONConfigLoader():
def __init__(self, base_path):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(base_path)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
tries = []
for source in self.sources:
file_path = os.path.join(source, filename)
tries.append(file_path)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
raise Exception("Config file not found in: %s" % tries)
| Raise exception in case of config load error | Raise exception in case of config load error
| Python | mit | voidpp/python-tools |
import os
import json
class JSONConfigLoader():
def __init__(self):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(__file__)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
for source in self.sources:
file_path = os.path.join(source, filename)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
return None
Raise exception in case of config load error |
import os
import json
class JSONConfigLoader():
def __init__(self, base_path):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(base_path)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
tries = []
for source in self.sources:
file_path = os.path.join(source, filename)
tries.append(file_path)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
raise Exception("Config file not found in: %s" % tries)
| <commit_before>
import os
import json
class JSONConfigLoader():
def __init__(self):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(__file__)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
for source in self.sources:
file_path = os.path.join(source, filename)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
return None
<commit_msg>Raise exception in case of config load error<commit_after> |
import os
import json
class JSONConfigLoader():
def __init__(self, base_path):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(base_path)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
tries = []
for source in self.sources:
file_path = os.path.join(source, filename)
tries.append(file_path)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
raise Exception("Config file not found in: %s" % tries)
|
import os
import json
class JSONConfigLoader():
def __init__(self):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(__file__)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
for source in self.sources:
file_path = os.path.join(source, filename)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
return None
Raise exception in case of config load error
import os
import json
class JSONConfigLoader():
def __init__(self, base_path):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(base_path)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
tries = []
for source in self.sources:
file_path = os.path.join(source, filename)
tries.append(file_path)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
raise Exception("Config file not found in: %s" % tries)
| <commit_before>
import os
import json
class JSONConfigLoader():
def __init__(self):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(__file__)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
for source in self.sources:
file_path = os.path.join(source, filename)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
return None
<commit_msg>Raise exception in case of config load error<commit_after>
import os
import json
class JSONConfigLoader():
def __init__(self, base_path):
self.sources = [
os.path.dirname(os.getcwd()),
os.path.dirname(os.path.abspath(base_path)),
os.path.expanduser('~'),
'/etc',
]
def load(self, filename):
tries = []
for source in self.sources:
file_path = os.path.join(source, filename)
tries.append(file_path)
if not os.path.exists(file_path):
continue
with open(file_path) as f:
return json.load(f)
raise Exception("Config file not found in: %s" % tries)
|
a48ae09ce927622e8a5931dbcb843523d8f4bd23 | wagtail/tests/test_utils.py | wagtail/tests/test_utils.py | # -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
| # -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
| Reset warnings before testing warnings | Reset warnings before testing warnings
| Python | bsd-3-clause | nimasmi/wagtail,torchbox/wagtail,mikedingjan/wagtail,kaedroho/wagtail,zerolab/wagtail,zerolab/wagtail,wagtail/wagtail,chrxr/wagtail,hamsterbacke23/wagtail,rsalmaso/wagtail,kurtrwall/wagtail,quru/wagtail,chrxr/wagtail,kaedroho/wagtail,Toshakins/wagtail,zerolab/wagtail,nealtodd/wagtail,nutztherookie/wagtail,Toshakins/wagtail,nimasmi/wagtail,thenewguy/wagtail,kaedroho/wagtail,takeflight/wagtail,rsalmaso/wagtail,nutztherookie/wagtail,quru/wagtail,wagtail/wagtail,nimasmi/wagtail,mixxorz/wagtail,timorieber/wagtail,torchbox/wagtail,nilnvoid/wagtail,takeflight/wagtail,thenewguy/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,wagtail/wagtail,nimasmi/wagtail,mixxorz/wagtail,mikedingjan/wagtail,gasman/wagtail,zerolab/wagtail,jnns/wagtail,nilnvoid/wagtail,kurtw/wagtail,iansprice/wagtail,wagtail/wagtail,hamsterbacke23/wagtail,timorieber/wagtail,takeflight/wagtail,zerolab/wagtail,iansprice/wagtail,jnns/wagtail,kaedroho/wagtail,kurtrwall/wagtail,torchbox/wagtail,quru/wagtail,rsalmaso/wagtail,mixxorz/wagtail,FlipperPA/wagtail,FlipperPA/wagtail,hamsterbacke23/wagtail,torchbox/wagtail,mikedingjan/wagtail,quru/wagtail,nutztherookie/wagtail,takeflight/wagtail,Toshakins/wagtail,gasman/wagtail,thenewguy/wagtail,kurtrwall/wagtail,gasman/wagtail,mixxorz/wagtail,nealtodd/wagtail,kurtw/wagtail,iansprice/wagtail,mixxorz/wagtail,mikedingjan/wagtail,FlipperPA/wagtail,jnns/wagtail,rsalmaso/wagtail,nealtodd/wagtail,kurtw/wagtail,thenewguy/wagtail,Toshakins/wagtail,iansprice/wagtail,thenewguy/wagtail,nealtodd/wagtail,kaedroho/wagtail,jnns/wagtail,nilnvoid/wagtail,kurtrwall/wagtail,timorieber/wagtail,chrxr/wagtail,hamsterbacke23/wagtail,gasman/wagtail,timorieber/wagtail,nutztherookie/wagtail,gasman/wagtail,kurtw/wagtail,wagtail/wagtail,FlipperPA/wagtail,chrxr/wagtail | # -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
Reset warnings before testing warnings | # -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
| <commit_before># -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
<commit_msg>Reset warnings before testing warnings<commit_after> | # -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
| # -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
Reset warnings before testing warnings# -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
| <commit_before># -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
<commit_msg>Reset warnings before testing warnings<commit_after># -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import warnings
from django.test import SimpleTestCase
from wagtail.utils.deprecation import RemovedInWagtail17Warning, SearchFieldsShouldBeAList
class TestThisShouldBeAList(SimpleTestCase):
def test_add_a_list(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ['world']
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that no deprecation warnings were raised
self.assertEqual(len(w), 0)
def test_add_a_tuple(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
base = SearchFieldsShouldBeAList(['hello'])
result = base + ('world',)
# Ensure that adding things together works
self.assertEqual(result, ['hello', 'world'])
# Ensure that a new SearchFieldsShouldBeAList was returned
self.assertIsInstance(result, SearchFieldsShouldBeAList)
# Check that a deprecation warning was raised
self.assertEqual(len(w), 1)
warning = w[0]
self.assertIs(warning.category, RemovedInWagtail17Warning)
|
a436470f00dddcb1764da6b6dc244e86bc71d473 | gscripts/ipython_imports.py | gscripts/ipython_imports.py | import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8] | import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
import seaborn as sns
import collections
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8] | Add seaborn, collections, itertools to IPython imports | Add seaborn, collections, itertools to IPython imports
| Python | mit | YeoLab/gscripts,YeoLab/gscripts,YeoLab/gscripts,YeoLab/gscripts | import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8]Add seaborn, collections, itertools to IPython imports | import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
import seaborn as sns
import collections
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8] | <commit_before>import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8]<commit_msg>Add seaborn, collections, itertools to IPython imports<commit_after> | import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
import seaborn as sns
import collections
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8] | import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8]Add seaborn, collections, itertools to IPython importsimport numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
import seaborn as sns
import collections
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8] | <commit_before>import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8]<commit_msg>Add seaborn, collections, itertools to IPython imports<commit_after>import numpy as np
import pandas as pd
import matplotlib_venn
import matplotlib.pyplot as plt
import brewer2mpl
import itertools
import seaborn as sns
import collections
import itertools
set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors
red = set1[0]
blue = set1[1]
green = set1[2]
purple = set1[3]
orange = set1[4]
yellow = set1[5]
brown = set1[6]
pink = set1[7]
grey = set1[8] |
5f56b2094180eb1b6922b58aece611e26ce5d1df | packages/reward-root-submitter/reward_root_submitter/lambda.py | packages/reward-root-submitter/reward_root_submitter/lambda.py | import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws.s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
| import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws:s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
| Fix event source for S3 put events | Fix event source for S3 put events | Python | mit | cardstack/cardstack,cardstack/cardstack,cardstack/cardstack,cardstack/cardstack,cardstack/cardstack,cardstack/cardstack | import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws.s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
Fix event source for S3 put events | import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws:s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
| <commit_before>import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws.s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
<commit_msg>Fix event source for S3 put events<commit_after> | import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws:s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
| import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws.s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
Fix event source for S3 put eventsimport logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws:s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
| <commit_before>import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws.s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
<commit_msg>Fix event source for S3 put events<commit_after>import logging
import urllib
import sentry_sdk
from cloudpathlib import AnyPath
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
from .config import Config
from .main import get_all_unsubmitted_roots, process_file, setup_logging
config = Config()
setup_logging(config)
sentry_sdk.init(
dsn=config.reward_root_submitter_sentry_dsn,
integrations=[
AwsLambdaIntegration(),
],
environment=config.environment,
traces_sample_rate=1.0,
)
def handler(event, _context):
if event.get("source") == "aws.events":
logging.info("Cron event triggered")
unsubmitted_roots = get_all_unsubmitted_roots(config)
for index, row in unsubmitted_roots.iterrows():
process_file(row["file"], config)
elif event["Records"][0]["eventSource"] == "aws:s3":
bucket = event["Records"][0]["s3"]["bucket"]["name"]
key = urllib.parse.unquote_plus(
event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
)
process_file(AnyPath(f"s3://{bucket}/{key}"), Config())
|
7fb1212ab97bca6301d9826258a594f8935bba28 | mopidy_ttsgpio/tts.py | mopidy_ttsgpio/tts.py | import urllib
import gst
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
self.player = gst.element_factory_make("playbin", "tts")
output = gst.parse_bin_from_description(config['audio']['output'],
ghost_unconnected_pads=True)
volume = config['ttsgpio']['tts_default_volume']
self.player.set_property('volume', volume)
self.player.set_property('audio-sink', output)
def speak_text(self, text):
self.player.set_state(gst.STATE_NULL)
params = {}
params['tl'] = 'en'
params['q'] = text.encode('ascii', 'ignore')
music_stream_uri = 'http://translate.google.com/translate_tts?' \
+ urllib.urlencode(params)
self.player.set_property('uri', music_stream_uri)
self.player.set_state(gst.STATE_PLAYING)
bus = self.player.get_bus()
bus.enable_sync_message_emission()
bus.add_signal_watch()
# bus.connect('message::eos', self.end_of_stream)
| import os
from threading import Thread
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
def speak_text(self, text):
t = Thread(target=self.speak_text_thread, args=(text,))
t.start()
def speak_text_thread(self, text):
os.system(' echo "' + text + '" | festival --tts')
| Change from Google TTS to Festival | Change from Google TTS to Festival
| Python | apache-2.0 | 9and3r/mopidy-ttsgpio | import urllib
import gst
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
self.player = gst.element_factory_make("playbin", "tts")
output = gst.parse_bin_from_description(config['audio']['output'],
ghost_unconnected_pads=True)
volume = config['ttsgpio']['tts_default_volume']
self.player.set_property('volume', volume)
self.player.set_property('audio-sink', output)
def speak_text(self, text):
self.player.set_state(gst.STATE_NULL)
params = {}
params['tl'] = 'en'
params['q'] = text.encode('ascii', 'ignore')
music_stream_uri = 'http://translate.google.com/translate_tts?' \
+ urllib.urlencode(params)
self.player.set_property('uri', music_stream_uri)
self.player.set_state(gst.STATE_PLAYING)
bus = self.player.get_bus()
bus.enable_sync_message_emission()
bus.add_signal_watch()
# bus.connect('message::eos', self.end_of_stream)
Change from Google TTS to Festival | import os
from threading import Thread
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
def speak_text(self, text):
t = Thread(target=self.speak_text_thread, args=(text,))
t.start()
def speak_text_thread(self, text):
os.system(' echo "' + text + '" | festival --tts')
| <commit_before>import urllib
import gst
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
self.player = gst.element_factory_make("playbin", "tts")
output = gst.parse_bin_from_description(config['audio']['output'],
ghost_unconnected_pads=True)
volume = config['ttsgpio']['tts_default_volume']
self.player.set_property('volume', volume)
self.player.set_property('audio-sink', output)
def speak_text(self, text):
self.player.set_state(gst.STATE_NULL)
params = {}
params['tl'] = 'en'
params['q'] = text.encode('ascii', 'ignore')
music_stream_uri = 'http://translate.google.com/translate_tts?' \
+ urllib.urlencode(params)
self.player.set_property('uri', music_stream_uri)
self.player.set_state(gst.STATE_PLAYING)
bus = self.player.get_bus()
bus.enable_sync_message_emission()
bus.add_signal_watch()
# bus.connect('message::eos', self.end_of_stream)
<commit_msg>Change from Google TTS to Festival<commit_after> | import os
from threading import Thread
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
def speak_text(self, text):
t = Thread(target=self.speak_text_thread, args=(text,))
t.start()
def speak_text_thread(self, text):
os.system(' echo "' + text + '" | festival --tts')
| import urllib
import gst
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
self.player = gst.element_factory_make("playbin", "tts")
output = gst.parse_bin_from_description(config['audio']['output'],
ghost_unconnected_pads=True)
volume = config['ttsgpio']['tts_default_volume']
self.player.set_property('volume', volume)
self.player.set_property('audio-sink', output)
def speak_text(self, text):
self.player.set_state(gst.STATE_NULL)
params = {}
params['tl'] = 'en'
params['q'] = text.encode('ascii', 'ignore')
music_stream_uri = 'http://translate.google.com/translate_tts?' \
+ urllib.urlencode(params)
self.player.set_property('uri', music_stream_uri)
self.player.set_state(gst.STATE_PLAYING)
bus = self.player.get_bus()
bus.enable_sync_message_emission()
bus.add_signal_watch()
# bus.connect('message::eos', self.end_of_stream)
Change from Google TTS to Festivalimport os
from threading import Thread
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
def speak_text(self, text):
t = Thread(target=self.speak_text_thread, args=(text,))
t.start()
def speak_text_thread(self, text):
os.system(' echo "' + text + '" | festival --tts')
| <commit_before>import urllib
import gst
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
self.player = gst.element_factory_make("playbin", "tts")
output = gst.parse_bin_from_description(config['audio']['output'],
ghost_unconnected_pads=True)
volume = config['ttsgpio']['tts_default_volume']
self.player.set_property('volume', volume)
self.player.set_property('audio-sink', output)
def speak_text(self, text):
self.player.set_state(gst.STATE_NULL)
params = {}
params['tl'] = 'en'
params['q'] = text.encode('ascii', 'ignore')
music_stream_uri = 'http://translate.google.com/translate_tts?' \
+ urllib.urlencode(params)
self.player.set_property('uri', music_stream_uri)
self.player.set_state(gst.STATE_PLAYING)
bus = self.player.get_bus()
bus.enable_sync_message_emission()
bus.add_signal_watch()
# bus.connect('message::eos', self.end_of_stream)
<commit_msg>Change from Google TTS to Festival<commit_after>import os
from threading import Thread
music_level = 30
class TTS():
def __init__(self, frontend, config):
self.frontend = frontend
def speak_text(self, text):
t = Thread(target=self.speak_text_thread, args=(text,))
t.start()
def speak_text_thread(self, text):
os.system(' echo "' + text + '" | festival --tts')
|
6da0aaf77fe221286981b94eaf7d304568f55957 | examples/stories/movie_lister/movies/__init__.py | examples/stories/movie_lister/movies/__init__.py | """Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from . import finders
from . import listers
from . import models
from dependency_injector import catalogs
from dependency_injector import providers
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
| """Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from dependency_injector import catalogs
from dependency_injector import providers
from . import finders
from . import listers
from . import models
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
| Update imports for MovieLister standrard module | Update imports for MovieLister standrard module
| Python | bsd-3-clause | rmk135/objects,ets-labs/python-dependency-injector,ets-labs/dependency_injector,rmk135/dependency_injector | """Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from . import finders
from . import listers
from . import models
from dependency_injector import catalogs
from dependency_injector import providers
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
Update imports for MovieLister standrard module | """Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from dependency_injector import catalogs
from dependency_injector import providers
from . import finders
from . import listers
from . import models
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
| <commit_before>"""Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from . import finders
from . import listers
from . import models
from dependency_injector import catalogs
from dependency_injector import providers
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
<commit_msg>Update imports for MovieLister standrard module<commit_after> | """Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from dependency_injector import catalogs
from dependency_injector import providers
from . import finders
from . import listers
from . import models
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
| """Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from . import finders
from . import listers
from . import models
from dependency_injector import catalogs
from dependency_injector import providers
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
Update imports for MovieLister standrard module"""Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from dependency_injector import catalogs
from dependency_injector import providers
from . import finders
from . import listers
from . import models
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
| <commit_before>"""Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from . import finders
from . import listers
from . import models
from dependency_injector import catalogs
from dependency_injector import providers
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
<commit_msg>Update imports for MovieLister standrard module<commit_after>"""Movies package.
Top-level package of movies library. This package contains catalog of movies
module components - ``MoviesModule``. It is recommended to use movies library
functionality by fetching required instances from ``MoviesModule`` providers.
Each of ``MoviesModule`` providers could be overridden.
"""
from dependency_injector import catalogs
from dependency_injector import providers
from . import finders
from . import listers
from . import models
class MoviesModule(catalogs.DeclarativeCatalog):
"""Catalog of movies module components."""
movie_model = providers.DelegatedFactory(models.Movie)
movie_finder = providers.Factory(finders.MovieFinder,
movie_model=movie_model)
movie_lister = providers.Factory(listers.MovieLister,
movie_finder=movie_finder)
|
9167d5e85d618d1786c8c72eb1eb0cb2f23a8043 | backdrop/write/config/development_environment_sample.py | backdrop/write/config/development_environment_sample.py | # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing__monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
| # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing_monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
| Fix typo in bucket name | Fix typo in bucket name
| Python | mit | alphagov/backdrop,alphagov/backdrop,alphagov/backdrop | # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing__monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
Fix typo in bucket name | # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing_monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
| <commit_before># Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing__monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
<commit_msg>Fix typo in bucket name<commit_after> | # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing_monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
| # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing__monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
Fix typo in bucket name# Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing_monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
| <commit_before># Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing__monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
<commit_msg>Fix typo in bucket name<commit_after># Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'licensing_monitoring': 'licensing_monitoring_bearer_token',
'licence_finder_monitoring': 'licence_finder_monitoring_bearer_token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
|
91bd7690c1e48b52a270bc45626e771663828c28 | pact/group.py | pact/group.py | from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
super(PactGroup, self).__init__()
self._pacts = list(pacts)
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| Fix PactGroup created log message | Fix PactGroup created log message
| Python | bsd-3-clause | vmalloc/pact | from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
super(PactGroup, self).__init__()
self._pacts = list(pacts)
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
Fix PactGroup created log message | from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| <commit_before>from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
super(PactGroup, self).__init__()
self._pacts = list(pacts)
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
<commit_msg>Fix PactGroup created log message<commit_after> | from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
super(PactGroup, self).__init__()
self._pacts = list(pacts)
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
Fix PactGroup created log messagefrom .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| <commit_before>from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
super(PactGroup, self).__init__()
self._pacts = list(pacts)
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
<commit_msg>Fix PactGroup created log message<commit_after>from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
|
6db1ddd9c7776cf07222ae58dc9b2c44135ac59a | spacy/__init__.py | spacy/__init__.py | # coding: utf8
from __future__ import unicode_literals
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
| # coding: utf8
from __future__ import unicode_literals
import warnings
import sys
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
if __version__ >= '2.1.0' and sys.maxunicode <= 65535:
raise ValueError('''You are running a narrow unicode build,
which is incompatible with spacy >= 2.1.0, reinstall Python and use a
wide unicode build instead. You can also rebuild Python and
set the --enable-unicode=ucs4 flag.''')
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
| Raise ValueError for narrow unicode build | Raise ValueError for narrow unicode build
| Python | mit | explosion/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy | # coding: utf8
from __future__ import unicode_literals
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
Raise ValueError for narrow unicode build | # coding: utf8
from __future__ import unicode_literals
import warnings
import sys
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
if __version__ >= '2.1.0' and sys.maxunicode <= 65535:
raise ValueError('''You are running a narrow unicode build,
which is incompatible with spacy >= 2.1.0, reinstall Python and use a
wide unicode build instead. You can also rebuild Python and
set the --enable-unicode=ucs4 flag.''')
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
| <commit_before># coding: utf8
from __future__ import unicode_literals
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
<commit_msg>Raise ValueError for narrow unicode build<commit_after> | # coding: utf8
from __future__ import unicode_literals
import warnings
import sys
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
if __version__ >= '2.1.0' and sys.maxunicode <= 65535:
raise ValueError('''You are running a narrow unicode build,
which is incompatible with spacy >= 2.1.0, reinstall Python and use a
wide unicode build instead. You can also rebuild Python and
set the --enable-unicode=ucs4 flag.''')
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
| # coding: utf8
from __future__ import unicode_literals
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
Raise ValueError for narrow unicode build# coding: utf8
from __future__ import unicode_literals
import warnings
import sys
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
if __version__ >= '2.1.0' and sys.maxunicode <= 65535:
raise ValueError('''You are running a narrow unicode build,
which is incompatible with spacy >= 2.1.0, reinstall Python and use a
wide unicode build instead. You can also rebuild Python and
set the --enable-unicode=ucs4 flag.''')
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
| <commit_before># coding: utf8
from __future__ import unicode_literals
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
<commit_msg>Raise ValueError for narrow unicode build<commit_after># coding: utf8
from __future__ import unicode_literals
import warnings
import sys
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
# These are imported as part of the API
from thinc.neural.util import prefer_gpu, require_gpu
from .cli.info import info as cli_info
from .glossary import explain
from .about import __version__
from .errors import Warnings, deprecation_warning
from . import util
if __version__ >= '2.1.0' and sys.maxunicode <= 65535:
raise ValueError('''You are running a narrow unicode build,
which is incompatible with spacy >= 2.1.0, reinstall Python and use a
wide unicode build instead. You can also rebuild Python and
set the --enable-unicode=ucs4 flag.''')
def load(name, **overrides):
depr_path = overrides.get("path")
if depr_path not in (True, False, None):
deprecation_warning(Warnings.W001.format(path=depr_path))
return util.load_model(name, **overrides)
def blank(name, **kwargs):
LangClass = util.get_lang_class(name)
return LangClass(**kwargs)
def info(model=None, markdown=False, silent=False):
return cli_info(model, markdown, silent)
|
ad4b6663a2de08fddc0ebef8a08e1f405c0cb80a | pkgcmp/cli.py | pkgcmp/cli.py | '''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp'}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
| '''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp',
'extension_modules': ''}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
| Add extension_modueles to the default configuration | Add extension_modueles to the default configuration
| Python | apache-2.0 | SS-RD/pkgcmp | '''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp'}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
Add extension_modueles to the default configuration | '''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp',
'extension_modules': ''}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
| <commit_before>'''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp'}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
<commit_msg>Add extension_modueles to the default configuration<commit_after> | '''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp',
'extension_modules': ''}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
| '''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp'}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
Add extension_modueles to the default configuration'''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp',
'extension_modules': ''}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
| <commit_before>'''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp'}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
<commit_msg>Add extension_modueles to the default configuration<commit_after>'''
Parse CLI options
'''
# Import python libs
import os
import copy
import argparse
# Import pkgcmp libs
import pkgcmp.scan
# Import third party libs
import yaml
DEFAULTS = {'cachedir': '/var/cache/pkgcmp',
'extension_modules': ''}
def parse():
'''
Parse!!
'''
parser = argparse.ArgumentParser(description='The pkgcmp map generator')
parser.add_argument(
'--cachedir',
dest='cachedir',
default=None,
help='The location to store all the files while working')
parser.add_argument(
'--config',
dest='config',
default='/etc/pkgcmp/pkgcmp',
help='The location of the pkgcmp config file')
opts = parser.parse_args().__dict__
conf = config(opts['config'])
for key in opts:
if opts[key] is not None:
conf[key] = opts[key]
return conf
def config(cfn):
'''
Read in the config file
'''
ret = copy.copy(DEFAULTS)
if os.path.isfile(cfn):
with open(cfn, 'r') as cfp:
conf = yaml.safe_load(cfp)
if isinstance(conf, dict):
ret.update(conf)
return ret
class PkgCmp:
'''
Build and run the application
'''
def __init__(self):
self.opts = parse()
self.scan = pkgcmp.scan.Scanner(self.opts)
def run(self):
self.scan.run()
|
88773c6757540c9f1d4dfca2a287512e74bdbc24 | python_scripts/mc_config.py | python_scripts/mc_config.py | #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
| #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
_defaults_config_file_base_name = 'defaults.yml'
_defaults_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '../config', _defaults_config_file_base_name))
def _load_yml( file_path ):
yml_file = open(file_path, 'rb')
config_file = yaml.load( yml_file )
return config_file
def deep_merge( original, update ):
for key, value in update.iteritems():
if not key in original:
original[ key ] = value
elif isinstance( value, dict) and isinstance( original[key], dict):
deep_merge( original[ key ], value )
return original
def read_config():
config_file = _load_yml( _config_file_name )
defaults_file = _load_yml( _defaults_config_file_name )
# print "config_file"
# print config_file
# print "defaults_file"
# print defaults_file
config_file = deep_merge( config_file, defaults_file )
# print "Merged"
# print config_file
return config_file
| Read the defaults config file and merge it with mediawords.yml | Read the defaults config file and merge it with mediawords.yml
| Python | agpl-3.0 | berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud | #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
Read the defaults config file and merge it with mediawords.yml | #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
_defaults_config_file_base_name = 'defaults.yml'
_defaults_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '../config', _defaults_config_file_base_name))
def _load_yml( file_path ):
yml_file = open(file_path, 'rb')
config_file = yaml.load( yml_file )
return config_file
def deep_merge( original, update ):
for key, value in update.iteritems():
if not key in original:
original[ key ] = value
elif isinstance( value, dict) and isinstance( original[key], dict):
deep_merge( original[ key ], value )
return original
def read_config():
config_file = _load_yml( _config_file_name )
defaults_file = _load_yml( _defaults_config_file_name )
# print "config_file"
# print config_file
# print "defaults_file"
# print defaults_file
config_file = deep_merge( config_file, defaults_file )
# print "Merged"
# print config_file
return config_file
| <commit_before>#!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
<commit_msg>Read the defaults config file and merge it with mediawords.yml<commit_after> | #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
_defaults_config_file_base_name = 'defaults.yml'
_defaults_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '../config', _defaults_config_file_base_name))
def _load_yml( file_path ):
yml_file = open(file_path, 'rb')
config_file = yaml.load( yml_file )
return config_file
def deep_merge( original, update ):
for key, value in update.iteritems():
if not key in original:
original[ key ] = value
elif isinstance( value, dict) and isinstance( original[key], dict):
deep_merge( original[ key ], value )
return original
def read_config():
config_file = _load_yml( _config_file_name )
defaults_file = _load_yml( _defaults_config_file_name )
# print "config_file"
# print config_file
# print "defaults_file"
# print defaults_file
config_file = deep_merge( config_file, defaults_file )
# print "Merged"
# print config_file
return config_file
| #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
Read the defaults config file and merge it with mediawords.yml#!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
_defaults_config_file_base_name = 'defaults.yml'
_defaults_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '../config', _defaults_config_file_base_name))
def _load_yml( file_path ):
yml_file = open(file_path, 'rb')
config_file = yaml.load( yml_file )
return config_file
def deep_merge( original, update ):
for key, value in update.iteritems():
if not key in original:
original[ key ] = value
elif isinstance( value, dict) and isinstance( original[key], dict):
deep_merge( original[ key ], value )
return original
def read_config():
config_file = _load_yml( _config_file_name )
defaults_file = _load_yml( _defaults_config_file_name )
# print "config_file"
# print config_file
# print "defaults_file"
# print defaults_file
config_file = deep_merge( config_file, defaults_file )
# print "Merged"
# print config_file
return config_file
| <commit_before>#!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
<commit_msg>Read the defaults config file and merge it with mediawords.yml<commit_after>#!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
_defaults_config_file_base_name = 'defaults.yml'
_defaults_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '../config', _defaults_config_file_base_name))
def _load_yml( file_path ):
yml_file = open(file_path, 'rb')
config_file = yaml.load( yml_file )
return config_file
def deep_merge( original, update ):
for key, value in update.iteritems():
if not key in original:
original[ key ] = value
elif isinstance( value, dict) and isinstance( original[key], dict):
deep_merge( original[ key ], value )
return original
def read_config():
config_file = _load_yml( _config_file_name )
defaults_file = _load_yml( _defaults_config_file_name )
# print "config_file"
# print config_file
# print "defaults_file"
# print defaults_file
config_file = deep_merge( config_file, defaults_file )
# print "Merged"
# print config_file
return config_file
|
b790d32260a08f61be59ee5783bdbf07ed3e1c66 | python/labs/quick-banking-app/starter-code/banking.py | python/labs/quick-banking-app/starter-code/banking.py | #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount():
pass | #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount(object):
pass
| Use new-style classes, following the same fix in the curriculum. | Use new-style classes, following the same fix in the curriculum.
| Python | apache-2.0 | google/cssi-labs,google/cssi-labs | #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount():
passUse new-style classes, following the same fix in the curriculum. | #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount(object):
pass
| <commit_before>#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount():
pass<commit_msg>Use new-style classes, following the same fix in the curriculum.<commit_after> | #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount(object):
pass
| #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount():
passUse new-style classes, following the same fix in the curriculum.#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount(object):
pass
| <commit_before>#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount():
pass<commit_msg>Use new-style classes, following the same fix in the curriculum.<commit_after>#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Replace "pass" with your code
class BankAccount(object):
pass
|
f51162070c61fe9d3906c7d741432356a08a4ce6 | openstack/__init__.py | openstack/__init__.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import connection # NOQA
from openstack import exceptions # NOQA
from openstack import profile # NOQA
from openstack import utils # NOQA
| Make end-user modules accessible from top level | Make end-user modules accessible from top level
There are several modules that end-users are expected to be
using--connection, profile, exceptions, and utils--which could be made
slightly more accessible by having them available in the top level
`openstack` namespace. This change proposes importing `from openstack
import <x>` in openstack.__init__.py so that end-users can do `import
openstack` and then work from there.
Nothing more than the typical end-user entry points are to be exposed
from there, so if you need to get the server resource you still need
to do `from openstack.compute.v2 import server`. Additionally, we should
continue documenting and using in tests everything in the `from
openstack import <x>` format ourselves as this is merely a convenience.
Change-Id: I24a7ce3636b18287fcb2246fbdfa9f8b6767f323
| Python | apache-2.0 | dtroyer/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk,dtroyer/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk | Make end-user modules accessible from top level
There are several modules that end-users are expected to be
using--connection, profile, exceptions, and utils--which could be made
slightly more accessible by having them available in the top level
`openstack` namespace. This change proposes importing `from openstack
import <x>` in openstack.__init__.py so that end-users can do `import
openstack` and then work from there.
Nothing more than the typical end-user entry points are to be exposed
from there, so if you need to get the server resource you still need
to do `from openstack.compute.v2 import server`. Additionally, we should
continue documenting and using in tests everything in the `from
openstack import <x>` format ourselves as this is merely a convenience.
Change-Id: I24a7ce3636b18287fcb2246fbdfa9f8b6767f323 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import connection # NOQA
from openstack import exceptions # NOQA
from openstack import profile # NOQA
from openstack import utils # NOQA
| <commit_before><commit_msg>Make end-user modules accessible from top level
There are several modules that end-users are expected to be
using--connection, profile, exceptions, and utils--which could be made
slightly more accessible by having them available in the top level
`openstack` namespace. This change proposes importing `from openstack
import <x>` in openstack.__init__.py so that end-users can do `import
openstack` and then work from there.
Nothing more than the typical end-user entry points are to be exposed
from there, so if you need to get the server resource you still need
to do `from openstack.compute.v2 import server`. Additionally, we should
continue documenting and using in tests everything in the `from
openstack import <x>` format ourselves as this is merely a convenience.
Change-Id: I24a7ce3636b18287fcb2246fbdfa9f8b6767f323<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import connection # NOQA
from openstack import exceptions # NOQA
from openstack import profile # NOQA
from openstack import utils # NOQA
| Make end-user modules accessible from top level
There are several modules that end-users are expected to be
using--connection, profile, exceptions, and utils--which could be made
slightly more accessible by having them available in the top level
`openstack` namespace. This change proposes importing `from openstack
import <x>` in openstack.__init__.py so that end-users can do `import
openstack` and then work from there.
Nothing more than the typical end-user entry points are to be exposed
from there, so if you need to get the server resource you still need
to do `from openstack.compute.v2 import server`. Additionally, we should
continue documenting and using in tests everything in the `from
openstack import <x>` format ourselves as this is merely a convenience.
Change-Id: I24a7ce3636b18287fcb2246fbdfa9f8b6767f323# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import connection # NOQA
from openstack import exceptions # NOQA
from openstack import profile # NOQA
from openstack import utils # NOQA
| <commit_before><commit_msg>Make end-user modules accessible from top level
There are several modules that end-users are expected to be
using--connection, profile, exceptions, and utils--which could be made
slightly more accessible by having them available in the top level
`openstack` namespace. This change proposes importing `from openstack
import <x>` in openstack.__init__.py so that end-users can do `import
openstack` and then work from there.
Nothing more than the typical end-user entry points are to be exposed
from there, so if you need to get the server resource you still need
to do `from openstack.compute.v2 import server`. Additionally, we should
continue documenting and using in tests everything in the `from
openstack import <x>` format ourselves as this is merely a convenience.
Change-Id: I24a7ce3636b18287fcb2246fbdfa9f8b6767f323<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import connection # NOQA
from openstack import exceptions # NOQA
from openstack import profile # NOQA
from openstack import utils # NOQA
| |
25ba4aea17d869022682fd70d4c3ccbade19955f | openfisca_country_template/situation_examples/__init__.py | openfisca_country_template/situation_examples/__init__.py | """This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
| """This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r", encoding="utf8") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
| Add encoding to open file | Add encoding to open file
| Python | agpl-3.0 | openfisca/country-template,openfisca/country-template | """This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
Add encoding to open file | """This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r", encoding="utf8") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
| <commit_before>"""This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
<commit_msg>Add encoding to open file<commit_after> | """This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r", encoding="utf8") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
| """This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
Add encoding to open file"""This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r", encoding="utf8") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
| <commit_before>"""This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
<commit_msg>Add encoding to open file<commit_after>"""This file provides a function to load json example situations."""
import json
import os
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
def parse(file_name):
"""Load json example situations."""
file_path = os.path.join(DIR_PATH, file_name)
with open(file_path, "r", encoding="utf8") as file:
return json.loads(file.read())
single = parse("single.json")
couple = parse("couple.json")
|
eb1921615cd9070564d09c934d2c687897619c3a | froide/campaign/listeners.py | froide/campaign/listeners.py | from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
| from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
sender.user.tags.add(campaign.ident)
if not sender.user.is_active:
# First-time requester
sender.user.tags.add('%s-first' % campaign.ident)
| Add tags of campaign to user | Add tags of campaign to user | Python | mit | stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide | from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
Add tags of campaign to user | from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
sender.user.tags.add(campaign.ident)
if not sender.user.is_active:
# First-time requester
sender.user.tags.add('%s-first' % campaign.ident)
| <commit_before>from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
<commit_msg>Add tags of campaign to user<commit_after> | from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
sender.user.tags.add(campaign.ident)
if not sender.user.is_active:
# First-time requester
sender.user.tags.add('%s-first' % campaign.ident)
| from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
Add tags of campaign to userfrom .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
sender.user.tags.add(campaign.ident)
if not sender.user.is_active:
# First-time requester
sender.user.tags.add('%s-first' % campaign.ident)
| <commit_before>from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
<commit_msg>Add tags of campaign to user<commit_after>from .models import Campaign
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
try:
campaign = Campaign.objects.get(ident=namespace)
except Campaign.DoesNotExist:
return
sender.campaign = campaign
sender.save()
sender.user.tags.add(campaign.ident)
if not sender.user.is_active:
# First-time requester
sender.user.tags.add('%s-first' % campaign.ident)
|
b8a5655520449148e5f71790f85dfafd84faebec | python/peacock/tests/postprocessor_tab/gold/TestPostprocessorPluginManager_test_script.py | python/peacock/tests/postprocessor_tab/gold/TestPostprocessorPluginManager_test_script.py | #* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| """
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| Remove header added to gold file | Remove header added to gold file
| Python | lgpl-2.1 | nuclear-wizard/moose,YaqiWang/moose,harterj/moose,milljm/moose,sapitts/moose,dschwen/moose,jessecarterMOOSE/moose,sapitts/moose,andrsd/moose,sapitts/moose,lindsayad/moose,andrsd/moose,harterj/moose,jessecarterMOOSE/moose,idaholab/moose,jessecarterMOOSE/moose,dschwen/moose,lindsayad/moose,dschwen/moose,SudiptaBiswas/moose,permcody/moose,laagesen/moose,SudiptaBiswas/moose,SudiptaBiswas/moose,laagesen/moose,milljm/moose,milljm/moose,jessecarterMOOSE/moose,YaqiWang/moose,YaqiWang/moose,nuclear-wizard/moose,lindsayad/moose,dschwen/moose,bwspenc/moose,laagesen/moose,permcody/moose,idaholab/moose,lindsayad/moose,nuclear-wizard/moose,dschwen/moose,idaholab/moose,bwspenc/moose,jessecarterMOOSE/moose,andrsd/moose,milljm/moose,SudiptaBiswas/moose,idaholab/moose,andrsd/moose,harterj/moose,lindsayad/moose,bwspenc/moose,YaqiWang/moose,SudiptaBiswas/moose,nuclear-wizard/moose,sapitts/moose,permcody/moose,bwspenc/moose,milljm/moose,harterj/moose,sapitts/moose,laagesen/moose,bwspenc/moose,laagesen/moose,andrsd/moose,harterj/moose,permcody/moose,idaholab/moose | #* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
Remove header added to gold file | """
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| <commit_before>#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
<commit_msg>Remove header added to gold file<commit_after> | """
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| #* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
Remove header added to gold file"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
| <commit_before>#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
<commit_msg>Remove header added to gold file<commit_after>"""
python TestPostprocessorPluginManager_test_script.py
"""
import matplotlib.pyplot as plt
import mooseutils
# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)
axes1 = axes0.twinx()
# Read Postprocessor Data
data = mooseutils.PostprocessorReader('../input/white_elephant_jan_2016.csv')
x = data('time')
y = data('air_temp_set_1')
axes1.plot(x, y, marker='', linewidth=5.0, color=[0.2, 0.627, 0.173, 1.0], markersize=1, linestyle=u'--', label='air_temp_set_1')
# Axes Settings
axes1.legend(loc='lower right')
axes0.set_title('Snow Data')
# y1-axis Settings
axes1.set_ylabel('Air Temperature [C]')
axes1.set_ylim([0.0, 35.939999999999998])
# Show figure and write pdf
plt.show()
figure.savefig("output.pdf")
|
2a31c4542cec4c46d22ec6cd905bb60813810cef | web/geosearch/tests/test_bag_dataset.py | web/geosearch/tests/test_bag_dataset.py | import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 7)
if __name__ == '__main__':
unittest.main()
| import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 6)
if __name__ == '__main__':
unittest.main()
| Remove WKPB from geosearch - also change test | Remove WKPB from geosearch - also change test
| Python | mpl-2.0 | DatapuntAmsterdam/datapunt_geosearch,DatapuntAmsterdam/datapunt_geosearch | import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 7)
if __name__ == '__main__':
unittest.main()
Remove WKPB from geosearch - also change test | import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 6)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 7)
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove WKPB from geosearch - also change test<commit_after> | import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 6)
if __name__ == '__main__':
unittest.main()
| import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 7)
if __name__ == '__main__':
unittest.main()
Remove WKPB from geosearch - also change testimport unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 6)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 7)
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove WKPB from geosearch - also change test<commit_after>import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 6)
if __name__ == '__main__':
unittest.main()
|
e53c66f9ab12fe0c90c447176b083513cd3a4cf5 | store/urls.py | store/urls.py | from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
url(r'^review/$', product_review, name='review')
]
| from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
| Move products:review URLConf above product:detail | Move products:review URLConf above product:detail
The product:detail view is greedy and previously caused the review
URLConf never to be resolved by the correct view
| Python | bsd-3-clause | kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop | from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
url(r'^review/$', product_review, name='review')
]
Move products:review URLConf above product:detail
The product:detail view is greedy and previously caused the review
URLConf never to be resolved by the correct view | from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
| <commit_before>from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
url(r'^review/$', product_review, name='review')
]
<commit_msg>Move products:review URLConf above product:detail
The product:detail view is greedy and previously caused the review
URLConf never to be resolved by the correct view<commit_after> | from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
| from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
url(r'^review/$', product_review, name='review')
]
Move products:review URLConf above product:detail
The product:detail view is greedy and previously caused the review
URLConf never to be resolved by the correct viewfrom django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
| <commit_before>from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
url(r'^review/$', product_review, name='review')
]
<commit_msg>Move products:review URLConf above product:detail
The product:detail view is greedy and previously caused the review
URLConf never to be resolved by the correct view<commit_after>from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
|
b68bf4c1b2ff2e152311adeb0b88e1192d5c4093 | Pyblosxom/_version.py | Pyblosxom/_version.py | #######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3"
| #######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3.wgkg"
| Update version for local development | Update version for local development
Conflicts:
Pyblosxom/_version.py
| Python | mit | willkg/douglas,willkg/douglas | #######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3"
Update version for local development
Conflicts:
Pyblosxom/_version.py | #######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3.wgkg"
| <commit_before>#######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3"
<commit_msg>Update version for local development
Conflicts:
Pyblosxom/_version.py<commit_after> | #######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3.wgkg"
| #######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3"
Update version for local development
Conflicts:
Pyblosxom/_version.py#######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3.wgkg"
| <commit_before>#######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3"
<commit_msg>Update version for local development
Conflicts:
Pyblosxom/_version.py<commit_after>#######################################################################
# This file is part of Pyblosxom.
#
# Copyright (C) 2003-2011 by the Pyblosxom team. See AUTHORS.
#
# Pyblosxom is distributed under the MIT license. See the file
# LICENSE for distribution details.
#######################################################################
# valid version formats:
# * x.y - final release
# * x.ya1 - alpha 1
# * x.yb1 - beta 1
# * x.yrc1 - release candidate 1
# * x.y.dev - dev
# see http://www.python.org/dev/peps/pep-0386/
__version__ = "1.5.3.wgkg"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.