commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
e289c48727573a43a062213fd52bc43a2781bd8b | indra/trips/trips_api.py | indra/trips/trips_api.py | import sys
import trips_client
from processor import TripsProcessor
def process_text(text):
html = trips_client.send_query(text)
xml = trips_client.get_xml(html)
trips_client.save_xml(xml, 'test.xml')
return process_xml(xml)
def process_xml(xml_string):
tp = TripsProcessor(xml_string)
tp.get_complexes()
tp.get_phosphorylation()
tp.get_activating_mods()
tp.get_activations()
return tp
if __name__ == '__main__':
input_fname = 'phosphorylate.xml'
if len(sys.argv) > 1:
input_fname = sys.argv[1]
try:
fh = open(input_fname, 'rt')
except IOError:
print 'Could not open file %s' % input_fname
sys.exit()
xml_string = fh.read()
tp = TripsProcessor(xml_string)
tp.get_complexes()
tp.get_phosphorylation()
| import sys
import trips_client
from processor import TripsProcessor
def process_text(text, save_xml_name='trips_output.xml'):
html = trips_client.send_query(text)
xml = trips_client.get_xml(html)
if save_xml_name:
trips_client.save_xml(xml, save_xml_name)
return process_xml(xml)
def process_xml(xml_string):
tp = TripsProcessor(xml_string)
tp.get_complexes()
tp.get_phosphorylation()
tp.get_activating_mods()
tp.get_activations()
return tp
if __name__ == '__main__':
input_fname = 'phosphorylate.xml'
if len(sys.argv) > 1:
input_fname = sys.argv[1]
try:
fh = open(input_fname, 'rt')
except IOError:
print 'Could not open file %s' % input_fname
sys.exit()
xml_string = fh.read()
tp = TripsProcessor(xml_string)
tp.get_complexes()
tp.get_phosphorylation()
| Add save xml name argument to TRIPS API | Add save xml name argument to TRIPS API
| Python | bsd-2-clause | jmuhlich/indra,pvtodorov/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,bgyori/indra,johnbachman/indra,jmuhlich/indra,jmuhlich/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra | ---
+++
@@ -3,10 +3,11 @@
from processor import TripsProcessor
-def process_text(text):
+def process_text(text, save_xml_name='trips_output.xml'):
html = trips_client.send_query(text)
xml = trips_client.get_xml(html)
- trips_client.save_xml(xml, 'test.xml')
+ if save_xml_name:
+ trips_client.save_xml(xml, save_xml_name)
return process_xml(xml)
|
62e5867f9dc5a758e3803e66043255881c8250c2 | democracy_club/apps/dc_members/forms.py | democracy_club/apps/dc_members/forms.py | from django.forms import ModelForm
from localflavor.gb.forms import GBPostcodeField
from .models import Member
class MemberUpdateForm(ModelForm):
class Meta:
model = Member
exclude = ['token', 'user', 'constituency', 'mapit_json']
postcode = GBPostcodeField(required=True) | from django.forms import ModelForm
from localflavor.gb.forms import GBPostcodeField
from .models import Member
class MemberUpdateForm(ModelForm):
class Meta:
model = Member
exclude = [
'token',
'user',
'constituency',
'mapit_json',
'source',
]
postcode = GBPostcodeField(required=True)
| Exclude most fields from User Profiles | Exclude most fields from User Profiles
| Python | bsd-3-clause | DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website | ---
+++
@@ -7,6 +7,12 @@
class MemberUpdateForm(ModelForm):
class Meta:
model = Member
- exclude = ['token', 'user', 'constituency', 'mapit_json']
+ exclude = [
+ 'token',
+ 'user',
+ 'constituency',
+ 'mapit_json',
+ 'source',
+ ]
postcode = GBPostcodeField(required=True) |
c608e7c88c4971e647171014ac5c8e77ecb0df34 | braid/info.py | braid/info.py | from fabric.api import run, quiet
from braid import succeeds, cacheInEnvironment
@cacheInEnvironment
def distroName():
"""
Get the name of the distro.
"""
with quiet():
lsb = run('/usr/bin/lsb_release --id --short', warn_only=True)
if lsb.succeeded:
return lsb.lower()
distros = [
('centos', '/etc/centos-release'),
('fedora', '/etc/fedora-release'),
]
for distro, sentinel in distros:
if succeeds('/usr/bin/test -f {}'.format(sentinel)):
return distro
def distroFamily():
"""
Get the family of the distro.
@returns: C{'debian'} or C{'fedora'}
"""
families = {
'debian': ['debian', 'ubuntu'],
'fedora': ['fedora', 'centos', 'rhel'],
}
distro = distroName()
for family, members in families.iteritems():
if distro in members:
return family
return 'other'
| from fabric.api import run, quiet
from braid import succeeds, cacheInEnvironment
@cacheInEnvironment
def distroName():
"""
Get the name of the distro.
"""
with quiet():
lsb = run('/usr/bin/lsb_release --id --short', warn_only=True)
if lsb.succeeded:
return lsb.lower()
distros = [
('centos', '/etc/centos-release'),
('fedora', '/etc/fedora-release'),
]
for distro, sentinel in distros:
if succeeds('/usr/bin/test -f {}'.format(sentinel)):
return distro
def distroFamily():
"""
Get the family of the distro.
@returns: C{'debian'} or C{'fedora'}
"""
families = {
'debian': ['debian', 'ubuntu'],
'fedora': ['fedora', 'centos', 'rhel'],
}
distro = distroName()
for family, members in families.iteritems():
if distro in members:
return family
return 'other'
@cacheInEnvironment
def arch():
"""
Get the architechture of the machine.
"""
return run('/bin/uname --machine')
__all__ = ['distroName', 'distroFamily', 'arch']
| Add a helper to detect the architecture. | Add a helper to detect the architecture.
| Python | mit | alex/braid,alex/braid | ---
+++
@@ -19,6 +19,8 @@
if succeeds('/usr/bin/test -f {}'.format(sentinel)):
return distro
+
+
def distroFamily():
"""
Get the family of the distro.
@@ -34,3 +36,14 @@
if distro in members:
return family
return 'other'
+
+
+@cacheInEnvironment
+def arch():
+ """
+ Get the architechture of the machine.
+ """
+ return run('/bin/uname --machine')
+
+
+__all__ = ['distroName', 'distroFamily', 'arch'] |
8026e6b21aacffc6f08d634103bc32b1775882ae | devicehive/transports/base_transport.py | devicehive/transports/base_transport.py | class BaseTransport(object):
"""Base transport class."""
def __init__(self, name, data_format_class, data_format_options,
handler_class, handler_options):
self._name = name
self._data_format = data_format_class(**data_format_options)
self._handler = handler_class(**handler_options)
self._connected = False
def _assert_not_connected(self):
assert not self._connected, 'transport connection already created'
def _assert_connected(self):
assert self._connected, 'transport connection has not created'
def _encode_obj(self, obj):
return self._data_format.encode(obj)
def _data_type(self):
return self._data_format.get_type()
def _decode_data(self, data):
return self._data_format.decode(data)
def _call_handler_method(self, name, *args):
getattr(self._handler, name)(self, *args)
def is_connected(self):
return self._connected
def connect(self, url, **options):
raise NotImplementedError
def request(self, action, request_object, **params):
raise NotImplementedError
def close(self):
raise NotImplementedError
def join(self, timeout=None):
raise NotImplementedError
| class BaseTransport(object):
"""Base transport class."""
def __init__(self, name, data_format_class, data_format_options,
handler_class, handler_options):
self._name = name
self._data_format = data_format_class(**data_format_options)
self._handler = handler_class(self, **handler_options)
self._connected = False
def _assert_not_connected(self):
assert not self._connected, 'transport connection already created'
def _assert_connected(self):
assert self._connected, 'transport connection has not created'
def _encode_obj(self, obj):
return self._data_format.encode(obj)
def _data_type(self):
return self._data_format.get_type()
def _decode_data(self, data):
return self._data_format.decode(data)
def _call_handler_method(self, name, *args):
getattr(self._handler, name)(*args)
def is_connected(self):
return self._connected
def connect(self, url, **options):
raise NotImplementedError
def request(self, action, request_object, **params):
raise NotImplementedError
def close(self):
raise NotImplementedError
def join(self, timeout=None):
raise NotImplementedError
| Remove transport from handler methods | Remove transport from handler methods
| Python | apache-2.0 | devicehive/devicehive-python | ---
+++
@@ -5,7 +5,7 @@
handler_class, handler_options):
self._name = name
self._data_format = data_format_class(**data_format_options)
- self._handler = handler_class(**handler_options)
+ self._handler = handler_class(self, **handler_options)
self._connected = False
def _assert_not_connected(self):
@@ -24,7 +24,7 @@
return self._data_format.decode(data)
def _call_handler_method(self, name, *args):
- getattr(self._handler, name)(self, *args)
+ getattr(self._handler, name)(*args)
def is_connected(self):
return self._connected |
22dcc9ee23841ecfbb23f76f2f8fd5c5c5bfb8cb | app/models.py | app/models.py | from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __init__(self, origin_point, destination_point, distance):
self.origin_point = origin_point
self.destination_point = destination_point
self.distance = distance
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
| from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point,
self.distance)
| Remove unecessary initialization from Route model | Remove unecessary initialization from Route model
| Python | mit | mdsrosa/routes_api_python | ---
+++
@@ -18,11 +18,6 @@
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
- def __init__(self, origin_point, destination_point, distance):
- self.origin_point = origin_point
- self.destination_point = destination_point
- self.distance = distance
-
def __repr__(self):
return '<Route <{0}-{1}-{2]>'.format(self.origin_pint,
self.destination_point, |
424fc74377ba4385e4c25fe90f888d39d5f14abd | runtests.py | runtests.py | #!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
| #!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
SITE_ID=1,
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
| Add SITE_ID to test settings since contrib.sites is in INSTALLED_APPS. | Add SITE_ID to test settings since contrib.sites is in INSTALLED_APPS.
| Python | mit | extertioner/django-localeurl,carljm/django-localeurl,gonnado/django-localeurl | ---
+++
@@ -14,6 +14,7 @@
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
+ SITE_ID=1,
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = { |
fe5d330fd809285576b1696ccb9807910f5778ce | numscons/__init__.py | numscons/__init__.py | from core.misc import get_scons_path, get_scons_build_dir, \
get_scons_configres_dir, get_scons_configres_filename
from core.numpyenv import GetNumpyEnvironment
from core.libinfo_scons import NumpyCheckLibAndHeader
from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT
from fortran_scons import CheckF77Mangling
import tools
| # XXX those are needed by the scons command only...
from core.misc import get_scons_path, get_scons_build_dir, \
get_scons_configres_dir, get_scons_configres_filename
# XXX those should not be needed by the scons command only...
from core.extension import get_python_inc, get_pythonlib_dir
# Those functions really belong to the public API
from core.numpyenv import GetNumpyEnvironment
from core.libinfo_scons import NumpyCheckLibAndHeader
from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT
from fortran_scons import CheckF77Mangling
#import tools
| Mark things to clean up later in global import | Mark things to clean up later in global import | Python | bsd-3-clause | cournape/numscons,cournape/numscons,cournape/numscons | ---
+++
@@ -1,9 +1,14 @@
+# XXX those are needed by the scons command only...
from core.misc import get_scons_path, get_scons_build_dir, \
get_scons_configres_dir, get_scons_configres_filename
+# XXX those should not be needed by the scons command only...
+from core.extension import get_python_inc, get_pythonlib_dir
+
+# Those functions really belong to the public API
from core.numpyenv import GetNumpyEnvironment
from core.libinfo_scons import NumpyCheckLibAndHeader
from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT
from fortran_scons import CheckF77Mangling
-import tools
+#import tools |
b2ccc1b8144bf54f9e1416c187f9b5670e5d234d | src/librement/registration/forms.py | src/librement/registration/forms.py | from django import forms
from django.contrib.auth.models import User
from librement.profile.enums import AccountEnum
from librement.profile.models import Profile
class RegistrationForm(forms.ModelForm):
email = forms.EmailField()
password = forms.CharField()
password_confirm = forms.CharField()
class Meta:
model = Profile
fields = (
'account_type',
'organisation',
'address_1',
'address_2',
'city',
'region',
'zipcode',
'country',
)
def clean_password_confirm(self):
password = self.cleaned_data.get('password', '')
password_confirm = self.cleaned_data['password_confirm']
if password != password_confirm:
raise forms.ValidationError("Passwords do not match.")
return password
def clean_organisation(self):
val = self.cleaned_data['organisation']
account_type = self.cleaned_data.get('account_type')
if account_type != AccountEnum.INDIVIDUAL and val == '':
raise forms.ValidationError(
"Required field for company/non-profit accounts"
)
return val
def save(self):
user = User.objects.create_user(
username='FIXME',
password=self.cleaned_data['password'],
)
return user
| from django import forms
from django.contrib.auth.models import User
from librement.profile.enums import AccountEnum
from librement.account.models import Email
from librement.profile.models import Profile
class RegistrationForm(forms.ModelForm):
email = forms.EmailField()
password = forms.CharField()
password_confirm = forms.CharField()
class Meta:
model = Profile
fields = (
'account_type',
'organisation',
'address_1',
'address_2',
'city',
'region',
'zipcode',
'country',
)
def clean_email(self):
val = self.cleaned_data['email']
if Email.objects.filter(email=val).exists():
raise forms.ValidationError("Email address already in use.")
return val
def clean_password_confirm(self):
password = self.cleaned_data.get('password', '')
password_confirm = self.cleaned_data['password_confirm']
if password != password_confirm:
raise forms.ValidationError("Passwords do not match.")
return password
def clean_organisation(self):
val = self.cleaned_data['organisation']
account_type = self.cleaned_data.get('account_type')
if account_type != AccountEnum.INDIVIDUAL and val == '':
raise forms.ValidationError(
"Required field for company/non-profit accounts"
)
return val
def save(self):
user = User.objects.create_user(
username='FIXME',
password=self.cleaned_data['password'],
)
return user
| Check email address is not already used. | Check email address is not already used.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
| Python | agpl-3.0 | rhertzog/librement,rhertzog/librement,rhertzog/librement | ---
+++
@@ -3,6 +3,7 @@
from librement.profile.enums import AccountEnum
+from librement.account.models import Email
from librement.profile.models import Profile
class RegistrationForm(forms.ModelForm):
@@ -23,6 +24,14 @@
'zipcode',
'country',
)
+
+ def clean_email(self):
+ val = self.cleaned_data['email']
+
+ if Email.objects.filter(email=val).exists():
+ raise forms.ValidationError("Email address already in use.")
+
+ return val
def clean_password_confirm(self):
password = self.cleaned_data.get('password', '') |
adfe28a11fea94b207eea0417123a4155c909f05 | gpiocrust/__init__.py | gpiocrust/__init__.py | """
Object oriented wrapper around RPi.GPIO. Falls back to mock objects if RPi.GPIO
is not found.
"""
try:
import RPi.GPIO
from .raspberry_pi import *
except RuntimeError:
print(
'----------------------------------------------------------------------------')
print(
' WARNING: RPi.GPIO can only be run on the RPi. Falling back to mock objects.')
print(
'----------------------------------------------------------------------------')
except ImportError:
print('-------------------------------------------------------------------')
print(' WARNING: RPi.GPIO library not found. Falling back to mock objects.')
print('-------------------------------------------------------------------')
from .gpio_mock import *
| """
Object oriented wrapper around RPi.GPIO. Falls back to mock objects if RPi.GPIO
is not found.
"""
try:
import RPi.GPIO
from .raspberry_pi import *
except RuntimeError:
print(
'----------------------------------------------------------------------------')
print(
' WARNING: RPi.GPIO can only be run on the RPi. Falling back to mock objects.')
print(
'----------------------------------------------------------------------------')
from .gpio_mock import *
except ImportError:
print('-------------------------------------------------------------------')
print(' WARNING: RPi.GPIO library not found. Falling back to mock objects.')
print('-------------------------------------------------------------------')
from .gpio_mock import *
| Fix import errors on non-RPi platforms | Fix import errors on non-RPi platforms
| Python | mit | zourtney/gpiocrust | ---
+++
@@ -13,6 +13,7 @@
' WARNING: RPi.GPIO can only be run on the RPi. Falling back to mock objects.')
print(
'----------------------------------------------------------------------------')
+ from .gpio_mock import *
except ImportError:
print('-------------------------------------------------------------------')
print(' WARNING: RPi.GPIO library not found. Falling back to mock objects.') |
da2dc4e6f905356a705e2f75701f9d23c4b008ba | signac/contrib/errors.py | signac/contrib/errors.py | # Copyright (c) 2017 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
from ..core.errors import Error
class WorkspaceError(Error, OSError):
"Raised when there is an issue to create or access the workspace."
def __init__(self, error):
self.error = error
"The underlying error causing this issue."
class DestinationExistsError(Error, RuntimeError):
"The destination for a move or copy operation already exists."
def __init__(self, destination):
self.destination = destination
"The destination object causing the error."
class JobsCorruptedError(Error, RuntimeError):
"The state point manifest file of one or more jobs cannot be openend or is corrupted."
def __init__(self, job_ids):
self.job_ids = job_ids
"The job id(s) of the corrupted job(s)."
| # Copyright (c) 2017 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
from ..core.errors import Error
class WorkspaceError(Error, OSError):
"Raised when there is an issue to create or access the workspace."
def __init__(self, error):
self.error = error
"The underlying error causing this issue."
def __str__(self):
return self.error
class DestinationExistsError(Error, RuntimeError):
"The destination for a move or copy operation already exists."
def __init__(self, destination):
self.destination = destination
"The destination object causing the error."
class JobsCorruptedError(Error, RuntimeError):
"The state point manifest file of one or more jobs cannot be openend or is corrupted."
def __init__(self, job_ids):
self.job_ids = job_ids
"The job id(s) of the corrupted job(s)."
| Fix OSError not printing bug | Fix OSError not printing bug
| Python | bsd-3-clause | csadorf/signac,csadorf/signac | ---
+++
@@ -9,6 +9,9 @@
def __init__(self, error):
self.error = error
"The underlying error causing this issue."
+
+ def __str__(self):
+ return self.error
class DestinationExistsError(Error, RuntimeError): |
24da68c145750ae230a7a855a1daf3cd97a813d8 | config.py | config.py | # 4chThreadArchiver
# Copyright (C) 2016, Sebastian "Chloride Cull" Johansson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
ARCHIVE_SITE="https://desustorage.org"
UA="4chThreadArchiver/1.0 (Python {vinfo[0]}.{vinfo[1]}.{vinfo[2]}, using urllib)"
#UA="Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.71 Safari/537.36" | # 4chThreadArchiver
# Copyright (C) 2016, Sebastian "Chloride Cull" Johansson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
ARCHIVE_SITE="https://desuarchive.org"
UA="4chThreadArchiver/1.0 (Python {vinfo[0]}.{vinfo[1]}.{vinfo[2]}, using urllib)"
#UA="Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.71 Safari/537.36" | Change desustorage URL to desuarchive | Change desustorage URL to desuarchive
| Python | agpl-3.0 | ChlorideCull/4chThreadArchiver | ---
+++
@@ -11,6 +11,6 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
-ARCHIVE_SITE="https://desustorage.org"
+ARCHIVE_SITE="https://desuarchive.org"
UA="4chThreadArchiver/1.0 (Python {vinfo[0]}.{vinfo[1]}.{vinfo[2]}, using urllib)"
#UA="Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.71 Safari/537.36" |
dad58aa0162290627e9d96a5047a507237a49b76 | calculate.py | calculate.py |
operators = {'+', '-', '*', '/', '(', ')'}
def parse_formula(text):
tokens = []
buffer = ''
for c in text:
if '0' <= c <= '9':
buffer += c
elif c in operators:
if buffer:
tokens.append(int(buffer))
tokens.append(c)
buffer = ''
if buffer:
tokens.append(int(buffer))
return tokens
if __name__ == '__main__':
import sys
if len(sys.argv) < 2:
print('Input formula required')
else:
formula = sys.argv[1]
print('tokens={}'.format(parse_formula(formula)))
| # List of operators along with their associated precedence
operators = {None: 100, '+': 3, '-': 3, '*': 2, '/': 2, '(': 1, ')': 1}
def operation(v1, v2, operator):
if item == '+':
return v1 + v2
elif item == '-':
return v1 - v2
elif item == '*':
return v1 * v2
elif item == '/':
return int(v1 / v2)
else:
raise ValueError('Unknown operator specified: {}'.format(item))
def parse_formula(text):
tokens = []
buffer = ''
for c in text:
if '0' <= c <= '9':
buffer += c
elif c in operators:
if buffer:
tokens.append(int(buffer))
tokens.append(c)
buffer = ''
if buffer:
tokens.append(int(buffer))
return tokens
if __name__ == '__main__':
import sys
if len(sys.argv) < 2:
print('Input formula required')
else:
formula = sys.argv[1]
tokens = parse_formula(formula)
operator_stack = []
operand_stack = []
for item in tokens:
if type(item) is int:
operand_stack.append(item)
elif type(item) is str:
if operator_stack:
peek = operator_stack[-1]
else:
peek = None
if operators[item] < operators[peek]:
operator_stack.append(item)
else:
value2 = operand_stack.pop()
value1 = operand_stack.pop()
operand_stack.append(operation(value1, value2, item))
else:
raise ValueError('Unknown item found in tokens')
while operator_stack:
item = operator_stack.pop()
value2 = operand_stack.pop()
value1 = operand_stack.pop()
operand_stack.append(operation(value1, value2, item))
print('Result = {}'.format(operand_stack.pop()))
| Add support for basic binary operations | Add support for basic binary operations
| Python | mit | MichaelAquilina/Simple-Calculator | ---
+++
@@ -1,5 +1,18 @@
+# List of operators along with their associated precedence
+operators = {None: 100, '+': 3, '-': 3, '*': 2, '/': 2, '(': 1, ')': 1}
-operators = {'+', '-', '*', '/', '(', ')'}
+
+def operation(v1, v2, operator):
+ if item == '+':
+ return v1 + v2
+ elif item == '-':
+ return v1 - v2
+ elif item == '*':
+ return v1 * v2
+ elif item == '/':
+ return int(v1 / v2)
+ else:
+ raise ValueError('Unknown operator specified: {}'.format(item))
def parse_formula(text):
@@ -30,7 +43,36 @@
print('Input formula required')
else:
formula = sys.argv[1]
+ tokens = parse_formula(formula)
- print('tokens={}'.format(parse_formula(formula)))
+ operator_stack = []
+ operand_stack = []
+ for item in tokens:
+ if type(item) is int:
+ operand_stack.append(item)
+ elif type(item) is str:
+ if operator_stack:
+ peek = operator_stack[-1]
+ else:
+ peek = None
+
+ if operators[item] < operators[peek]:
+ operator_stack.append(item)
+ else:
+ value2 = operand_stack.pop()
+ value1 = operand_stack.pop()
+
+ operand_stack.append(operation(value1, value2, item))
+ else:
+ raise ValueError('Unknown item found in tokens')
+
+ while operator_stack:
+ item = operator_stack.pop()
+ value2 = operand_stack.pop()
+ value1 = operand_stack.pop()
+
+ operand_stack.append(operation(value1, value2, item))
+
+ print('Result = {}'.format(operand_stack.pop())) |
b6b1117df271dae8adefa8cb8d3413b73fb393ce | touchpad_listener/touchpad_listener.py | touchpad_listener/touchpad_listener.py |
import serial
import sonic
sonic_pi = sonic.SonicPi()
connection = serial.Serial('/dev/tty.usbmodem1421', 115200)
while True:
line = connection.readline()
command, argument = line.strip().split(' ', 1)
if command == 'pad':
number = int(argument)
sonic_pi.run('cue :pad, number: {}'.format(number))
|
import serial
import sonic
import glob
sonic_pi = sonic.SonicPi()
connection = serial.Serial(glob.glob('/dev/tty.usbmodem*')[0], 115200)
while True:
line = connection.readline()
command, argument = line.strip().split(' ', 1)
if command == 'pad':
number = int(argument)
sonic_pi.run('cue :pad, number: {}'.format(number))
| Use `glob` to find an appropriate serial ttry | Use `glob` to find an appropriate serial ttry | Python | bsd-2-clause | CoderDojoScotland/coderdojo-sequencer,jonathanhogg/coderdojo-sequencer | ---
+++
@@ -1,10 +1,11 @@
import serial
import sonic
+import glob
sonic_pi = sonic.SonicPi()
-connection = serial.Serial('/dev/tty.usbmodem1421', 115200)
+connection = serial.Serial(glob.glob('/dev/tty.usbmodem*')[0], 115200)
while True:
line = connection.readline() |
3372bade0c5aee8c30c507832c842d6533608f61 | porunga/tests/test_main.py | porunga/tests/test_main.py | import unittest
from porunga import get_manager
from porunga.commands.test import PorungaTestCommand
class TestManager(unittest.TestCase):
def test_manager_has_proper_commands(self):
manager = get_manager()
commands = manager.get_commands()
self.assertIn('test', commands)
test_command = commands['test']
self.assertIsInstance(test_command, PorungaTestCommand)
| import unittest
from porunga import get_manager
from porunga.commands.test import PorungaTestCommand
class TestManager(unittest.TestCase):
def test_manager_has_proper_commands(self):
manager = get_manager()
commands = manager.get_commands()
self.assertTrue('test' in commands)
test_command = commands['test']
self.assertTrue(isinstance(test_command, PorungaTestCommand))
| Test updated to work with Python 2.6 | Test updated to work with Python 2.6
| Python | bsd-2-clause | lukaszb/porunga,lukaszb/porunga | ---
+++
@@ -9,7 +9,7 @@
manager = get_manager()
commands = manager.get_commands()
- self.assertIn('test', commands)
+ self.assertTrue('test' in commands)
test_command = commands['test']
- self.assertIsInstance(test_command, PorungaTestCommand)
+ self.assertTrue(isinstance(test_command, PorungaTestCommand))
|
71e73f19fcf9770f6b79092e3af66c9f27ecdc61 | eduid_dashboard_amp/__init__.py | eduid_dashboard_amp/__init__.py | from eduid_am.exceptions import UserDoesNotExist
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
else:
# white list of valid attributes for security reasons
for attr in ('email', 'date', 'verified'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
# This values must overwrite existent values
for attr in ('screen_name', 'last_name', 'first_name', 'passwords'):
value = user.get(attr, None)
if value is not None:
attributes[attr] = value
return attributes
| from eduid_am.exceptions import UserDoesNotExist
WHITELIST_SET_ATTRS = (
'givenName',
'sn',
'displayName',
'photo',
'preferredLanguage',
'mail',
# TODO: Arrays must use put or pop, not set, but need more deep refacts
'norEduPersonNIN',
'eduPersonEntitlement',
'mobile',
'mailAliases',
'portalAddress',
'passwords',
)
def attribute_fetcher(db, user_id):
attributes = {}
user = db.profiles.find_one({'_id': user_id})
if user is None:
raise UserDoesNotExist("No user matching _id='%s'" % user_id)
else:
# white list of valid attributes for security reasons
attributes_set = {}
for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value is not None:
attributes_set[attr] = value
attributes['$set'] = attributes_set
return attributes
| Add the new schema, now the $set modifier is used | Add the new schema, now the $set modifier is used
| Python | bsd-3-clause | SUNET/eduid-dashboard-amp | ---
+++
@@ -1,4 +1,23 @@
from eduid_am.exceptions import UserDoesNotExist
+
+
+WHITELIST_SET_ATTRS = (
+ 'givenName',
+ 'sn',
+ 'displayName',
+ 'photo',
+ 'preferredLanguage',
+ 'mail',
+
+ # TODO: Arrays must use put or pop, not set, but need more deep refacts
+ 'norEduPersonNIN',
+ 'eduPersonEntitlement',
+ 'mobile',
+ 'mailAliases',
+ 'portalAddress',
+
+ 'passwords',
+)
def attribute_fetcher(db, user_id):
@@ -10,15 +29,11 @@
else:
# white list of valid attributes for security reasons
- for attr in ('email', 'date', 'verified'):
+ attributes_set = {}
+ for attr in WHITELIST_SET_ATTRS:
value = user.get(attr, None)
if value is not None:
- attributes[attr] = value
+ attributes_set[attr] = value
- # This values must overwrite existent values
- for attr in ('screen_name', 'last_name', 'first_name', 'passwords'):
- value = user.get(attr, None)
- if value is not None:
- attributes[attr] = value
-
+ attributes['$set'] = attributes_set
return attributes |
805c52698b3fed8df98462c15045f5de3822e241 | edx_repo_tools/dev/clone_org.py | edx_repo_tools/dev/clone_org.py | """Clone an entire GitHub organization."""
import os.path
import click
from git.repo.base import Repo
from edx_repo_tools.auth import pass_github
@click.command()
@click.option(
'--forks/--no-forks', is_flag=True, default=False,
help="Should forks be included?"
)
@click.option(
'--depth', type=int, default=0,
help="Depth argument for git clone",
)
@click.argument(
'org'
)
@pass_github
def main(hub, forks, depth, org):
for repo in hub.organization(org).repositories():
if repo.fork and not forks:
continue
dir_name = repo.name
dir_name = dir_name.lstrip("-") # avoid dirname/option confusion
if os.path.exists(dir_name):
continue
print(repo.full_name)
clone_args = {}
if depth:
clone_args['depth'] = depth
Repo.clone_from(repo.ssh_url, dir_name, **clone_args)
| """Clone an entire GitHub organization."""
import os.path
import click
from git.repo.base import Repo
from edx_repo_tools.auth import pass_github
@click.command()
@click.option(
'--forks/--no-forks', is_flag=True, default=False,
help="Should forks be included?"
)
@click.option(
'--depth', type=int, default=0,
help="Depth argument for git clone",
)
@click.argument(
'org'
)
@pass_github
def main(hub, forks, depth, org):
for repo in hub.organization(org).iter_repos():
if repo.fork and not forks:
continue
dir_name = repo.name
dir_name = dir_name.lstrip("-") # avoid dirname/option confusion
if os.path.exists(dir_name):
continue
print(repo.full_name)
clone_args = {}
if depth:
clone_args['depth'] = depth
Repo.clone_from(repo.ssh_url, dir_name, **clone_args)
| Fix to work in python 3. | Fix to work in python 3.
| Python | apache-2.0 | edx/repo-tools,edx/repo-tools | ---
+++
@@ -6,7 +6,6 @@
from git.repo.base import Repo
from edx_repo_tools.auth import pass_github
-
@click.command()
@click.option(
@@ -22,7 +21,7 @@
)
@pass_github
def main(hub, forks, depth, org):
- for repo in hub.organization(org).repositories():
+ for repo in hub.organization(org).iter_repos():
if repo.fork and not forks:
continue
dir_name = repo.name |
40edb65ee751dfe4cf6e04ee59891266d8b14f30 | spacy/tests/regression/test_issue1380.py | spacy/tests/regression/test_issue1380.py | import pytest
from ...language import Language
def test_issue1380_empty_string():
nlp = Language()
doc = nlp('')
assert len(doc) == 0
@pytest.mark.models('en')
def test_issue1380_en(EN):
doc = EN('')
assert len(doc) == 0
| from __future__ import unicode_literals
import pytest
from ...language import Language
def test_issue1380_empty_string():
nlp = Language()
doc = nlp('')
assert len(doc) == 0
@pytest.mark.models('en')
def test_issue1380_en(EN):
doc = EN('')
assert len(doc) == 0
| Make test work for Python 2.7 | Make test work for Python 2.7
| Python | mit | recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy | ---
+++
@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
import pytest
from ...language import Language |
66cb548c0d609e6364f9ec934814911760023d92 | ehriportal/urls.py | ehriportal/urls.py | from django.conf import settings
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.contrib import admin
admin.autodiscover()
from pinax.apps.account.openid_consumer import PinaxConsumer
handler500 = "pinax.views.server_error"
urlpatterns = patterns("",
url(r"^$", direct_to_template, {
"template": "homepage.html",
}, name="home"),
url(r"^admin/invite_user/$", "pinax.apps.signup_codes.views.admin_invite_user", name="admin_invite_user"),
url(r"^admin/", include(admin.site.urls)),
url(r"^about/", include("about.urls")),
url(r"^account/", include("pinax.apps.account.urls")),
url(r"^openid/", include(PinaxConsumer().urls)),
url(r"^profiles/", include("idios.urls")),
url(r"^notices/", include("notification.urls")),
url(r"^announcements/", include("announcements.urls")),
url(r"^repositories/", include("portal.repository_urls")),
url(r"^collections/", include("portal.collection_urls")),
)
if settings.SERVE_MEDIA:
urlpatterns += patterns("",
url(r"", include("staticfiles.urls")),
)
| from django.conf import settings
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.contrib import admin
admin.autodiscover()
from pinax.apps.account.openid_consumer import PinaxConsumer
handler500 = "pinax.views.server_error"
urlpatterns = patterns("",
url(r"^$", direct_to_template, {
"template": "homepage.html",
}, name="home"),
url(r"^admin/invite_user/$", "pinax.apps.signup_codes.views.admin_invite_user", name="admin_invite_user"),
url(r"^admin/", include(admin.site.urls)),
url(r"^about/", include("about.urls")),
url(r"^account/", include("pinax.apps.account.urls")),
url(r"^openid/", include(PinaxConsumer().urls)),
url(r"^profiles/", include("idios.urls")),
url(r"^notices/", include("notification.urls")),
url(r"^announcements/", include("announcements.urls")),
url(r"^repositories/", include("portal.repository_urls")),
url(r"^collections/", include("portal.collection_urls")),
)
if settings.SERVE_MEDIA:
urlpatterns += patterns("",
url(r"", include("staticfiles.urls"), {
'document_root': settings.MEDIA_ROOT,
}),
url("^site_media/media(?P<path>.*)$", 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}),
)
| Allow site_media/media to be served by staticfiles | Allow site_media/media to be served by staticfiles
| Python | mit | mikesname/ehri-collections,mikesname/ehri-collections,mikesname/ehri-collections | ---
+++
@@ -30,5 +30,10 @@
if settings.SERVE_MEDIA:
urlpatterns += patterns("",
- url(r"", include("staticfiles.urls")),
+ url(r"", include("staticfiles.urls"), {
+ 'document_root': settings.MEDIA_ROOT,
+ }),
+ url("^site_media/media(?P<path>.*)$", 'django.views.static.serve', {
+ 'document_root': settings.MEDIA_ROOT,
+ }),
) |
e442d59dc58f7d6eeea3a3786d806af07b2ccb6a | fixtures/generate_random_network.py | fixtures/generate_random_network.py | from factory import fuzzy
from users.tests.factories import UserFactory
RANDOM_SEED = 1024
def create_profiles(num_profiles):
fuzzy.reseed_random(RANDOM_SEED)
return [user.profile
for user in (UserFactory.create() for x in range(num_profiles))]
| from factory import fuzzy
from users.tests.factories import UserFactory
RANDOM_SEED = 1024
def create_profiles(num_profiles):
# This seed does nothing because I'm using fake-factory instead of
# factory.fuzzy. TODO fix it
fuzzy.reseed_random(RANDOM_SEED)
return [user.profile
for user in (UserFactory.create() for x in range(num_profiles))]
| Add todo to fix randomness | Add todo to fix randomness
| Python | mit | sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap | ---
+++
@@ -7,6 +7,8 @@
def create_profiles(num_profiles):
+ # This seed does nothing because I'm using fake-factory instead of
+ # factory.fuzzy. TODO fix it
fuzzy.reseed_random(RANDOM_SEED)
return [user.profile
for user in (UserFactory.create() for x in range(num_profiles))] |
f40da1b097d900c0c435d7550e891b0ece99bd91 | lib/torque_accounting.py | lib/torque_accounting.py | # torque_accounting.py
# Functions for working with Torque accounting files
def parse_line(line):
event = line.split(';')
job_name = event[2]
event_type = event[1]
event_time = event[0]
properties={}
prop_strings = event.split(" ")
for p in prop_strings:
prop=p.split("=")
if len(prop)=2:
properties[prop[0]] = prop[1]
return (job_name, event_type, event_time, properties)
def parse_records(text):
jobs = {}
lines=text.split("\n")
for line in lines:
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
jobs[job_name]['events'] = {}
jobs[job_name]['events'][event_type]=event_time
for p in properties:
jobs[job_name][p]=properties[p]
return jobs
| # torque_accounting.py
# Functions for working with Torque accounting files
def parse_line(line):
event = line.split(';')
job_name = event[2]
event_type = event[1]
event_time = event[0]
properties={}
prop_strings = event.split(" ")
for p in prop_strings:
prop=p.split("=")
if len(prop)=2:
properties[prop[0]] = prop[1]
return (job_name, event_type, event_time, properties)
def parse_records(text):
jobs = {}
lines=text.split("\n")
for line in lines:
if len(line)==0:
continue
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
jobs[job_name]['events'] = {}
jobs[job_name]['events'][event_type]=event_time
for p in properties:
jobs[job_name][p]=properties[p]
return jobs
def parse_files(filenames):
texts=[]
for fname in filenames:
f = open(fname,'r')
texts.append(f.read())
f.close
return parse_records("\n".join(texts))
| Add parse_files method to loop through a bunch of files | Add parse_files method to loop through a bunch of files
| Python | mit | ajdecon/torque_qhistory,ajdecon/torque_qhistory | ---
+++
@@ -22,6 +22,8 @@
lines=text.split("\n")
for line in lines:
+ if len(line)==0:
+ continue
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
@@ -32,3 +34,12 @@
jobs[job_name][p]=properties[p]
return jobs
+
+def parse_files(filenames):
+ texts=[]
+ for fname in filenames:
+ f = open(fname,'r')
+ texts.append(f.read())
+ f.close
+ return parse_records("\n".join(texts))
+ |
62e40ee27413b170d40791912d8509e26b981398 | examples/tools/print_devices.py | examples/tools/print_devices.py | # import PyOpenCL and Numpy. An OpenCL-enabled GPU is not required,
# OpenCL kernels can be compiled on most CPUs thanks to the Intel SDK for OpenCL
# or the AMD APP SDK.
import pyopencl as cl
def main():
dev_type_str = {}
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
print 'ID: %s' % platform_index
print platform.name
print platform.profile
print platform.vendor
print platform.version
for device in platform.get_devices():
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
try:
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
print '\t',
if param == 'TYPE':
value = '%s (%s)' % (
value,
dev_type_str.get(value, 'UNDEF')
)
print '%s:\t%s' % (
param,
value
)
print ''
if __name__ == '__main__':
main()
| # import PyOpenCL and Numpy. An OpenCL-enabled GPU is not required,
# OpenCL kernels can be compiled on most CPUs thanks to the Intel SDK for OpenCL
# or the AMD APP SDK.
import pyopencl as cl
def main():
dev_type_str = {}
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
print 'platform: %s' % platform_index
print '%s' % platform.name
print '%s' % platform.profile
print '%s' % platform.vendor
print '%s' % platform.version
for device in platform.get_devices():
print ' device: %s' % platform_index
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
try:
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
print ' ',
if param == 'TYPE':
value = '%s (%s)' % (
value,
dev_type_str.get(value, 'UNDEF')
)
print '%s:\t%s' % (
param,
value
)
print ''
if __name__ == '__main__':
main()
| Print devices example - change out format | Print devices example - change out format
| Python | mit | openre/openre,openre/openre | ---
+++
@@ -8,12 +8,13 @@
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
- print 'ID: %s' % platform_index
- print platform.name
- print platform.profile
- print platform.vendor
- print platform.version
+ print 'platform: %s' % platform_index
+ print '%s' % platform.name
+ print '%s' % platform.profile
+ print '%s' % platform.vendor
+ print '%s' % platform.version
for device in platform.get_devices():
+ print ' device: %s' % platform_index
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
@@ -21,7 +22,7 @@
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
- print '\t',
+ print ' ',
if param == 'TYPE':
value = '%s (%s)' % (
value, |
139675bc644b796f4b472b3a8d9abd90205204c4 | bands_inspect/__init__.py | bands_inspect/__init__.py | # -*- coding: utf-8 -*-
# (c) 2017-2019, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""
A tool for modifying, comparing and plotting electronic bandstructures.
"""
from . import kpoints
from . import eigenvals
from . import compare
from . import lattice
from . import plot
__version__ = '0.2.1'
| # -*- coding: utf-8 -*-
# (c) 2017-2019, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""
A tool for modifying, comparing and plotting electronic bandstructures.
"""
from . import kpoints
from . import eigenvals
from . import compare
from . import lattice
from . import plot
__version__ = '0.2.2'
| Change version number to 0.2.2. | Change version number to 0.2.2.
| Python | apache-2.0 | Z2PackDev/bands_inspect,Z2PackDev/bands_inspect | ---
+++
@@ -12,4 +12,4 @@
from . import lattice
from . import plot
-__version__ = '0.2.1'
+__version__ = '0.2.2' |
0f0a9eda5be7cfe0a2076dc2dd8a4d24068f75e0 | benchmarks/step_detect.py | benchmarks/step_detect.py | try:
from asv import step_detect
except ImportError:
pass
class Simple:
def setup(self):
self.y = ([1]*20 + [2]*30)*50
def time_detect_regressions(self):
step_detect.detect_regressions(self.y)
def time_solve_potts_approx(self):
step_detect.solve_potts_approx(self.y, 0.3, p=1)
| try:
from asv import step_detect
except ImportError:
pass
class Simple:
def setup(self):
self.y = ([1]*20 + [2]*30)*50
if hasattr(step_detect, 'detect_steps'):
def time_detect_regressions(self):
steps = step_detect.detect_steps(self.y)
step_detect.detect_regressions(steps)
else:
def time_detect_regressions(self):
step_detect.detect_regressions(self.y)
def time_solve_potts_approx(self):
step_detect.solve_potts_approx(self.y, 0.3, p=1)
| Fix benchmarks vs. changes in b1cc0a9aa5107 | Fix benchmarks vs. changes in b1cc0a9aa5107
| Python | bsd-3-clause | qwhelan/asv,qwhelan/asv,pv/asv,pv/asv,spacetelescope/asv,spacetelescope/asv,airspeed-velocity/asv,spacetelescope/asv,airspeed-velocity/asv,airspeed-velocity/asv,qwhelan/asv,pv/asv,airspeed-velocity/asv,spacetelescope/asv,qwhelan/asv,pv/asv | ---
+++
@@ -8,8 +8,13 @@
def setup(self):
self.y = ([1]*20 + [2]*30)*50
- def time_detect_regressions(self):
- step_detect.detect_regressions(self.y)
+ if hasattr(step_detect, 'detect_steps'):
+ def time_detect_regressions(self):
+ steps = step_detect.detect_steps(self.y)
+ step_detect.detect_regressions(steps)
+ else:
+ def time_detect_regressions(self):
+ step_detect.detect_regressions(self.y)
def time_solve_potts_approx(self):
step_detect.solve_potts_approx(self.y, 0.3, p=1) |
89975de83d82695ba4615c72d17ac85baa39593d | invar/utils/ocr.py | invar/utils/ocr.py | # -*- coding: utf-8 -*-
from baluhn import generate as baluhn_generate, verify as baluhn_verify
from django.conf import settings
def generate(reference, check_length=settings.INVAR_OCR_CHECK_LENGTH):
reference = str(reference)
assert check_length == 1 or check_length == 2
if check_length == 1:
return reference + baluhn_generate(reference)
elif check_length == 2:
long_reference = reference + str(len(reference) + 2)
return long_reference + baluhn_generate(long_reference)
def strip(ocr, check_length=settings.INVAR_OCR_CHECK_LENGTH):
ocr = str(ocr)
assert check_length == 1 or check_length == 2
return ocr[:-check_length]
def verify(ocr, check_length=settings.INVAR_OCR_CHECK_LENGTH):
ocr = str(ocr)
assert check_length == 1 or check_length == 2
if baluhn_verify(ocr):
if check_length == 1:
return True
elif check_length == 2 and str(len(ocr)) == ocr[-2]:
return True
return False
| # -*- coding: utf-8 -*-
from baluhn import generate as baluhn_generate, verify as baluhn_verify
from django.conf import settings
def generate(reference, check_length=settings.INVAR_OCR_CHECK_LENGTH):
reference = str(reference)
assert check_length == 1 or check_length == 2
if check_length == 1:
return reference + baluhn_generate(reference)
elif check_length == 2:
long_reference = reference + str(len(reference) + 2)
return long_reference + baluhn_generate(long_reference)
def strip(ocr, check_length=settings.INVAR_OCR_CHECK_LENGTH):
ocr = str(ocr)
assert check_length == 1 or check_length == 2
return ocr[:-check_length]
def verify(ocr, check_length=settings.INVAR_OCR_CHECK_LENGTH):
ocr = str(ocr)
assert check_length == 1 or check_length == 2
if baluhn_verify(ocr):
if check_length == 1:
return True
elif check_length == 2:
if len(ocr) < 3:
return False
if str(len(ocr)) == ocr[-2]:
return True
return False
| Fix for empty reference bug | Fix for empty reference bug
| Python | mit | ovidner/bitket,ovidner/bitket,ovidner/bitket,ovidner/bitket | ---
+++
@@ -28,6 +28,9 @@
if baluhn_verify(ocr):
if check_length == 1:
return True
- elif check_length == 2 and str(len(ocr)) == ocr[-2]:
- return True
+ elif check_length == 2:
+ if len(ocr) < 3:
+ return False
+ if str(len(ocr)) == ocr[-2]:
+ return True
return False |
860ef2b11774bc6acab848a6b37b808938086973 | pylibui/core.py | pylibui/core.py | """
Python wrapper for libui.
"""
from . import libui
class App:
def __init__(self):
"""
Creates a new pylibui app.
"""
options = libui.uiInitOptions()
libui.uiInit(options)
def __enter__(self):
self.start()
def start(self):
"""
Starts the application main loop.
:return: None
"""
libui.uiMain()
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
self.close()
def stop(self):
"""
Stops the application main loop.
:return: None
"""
libui.uiQuit()
def close(self):
"""
Closes the application and frees resources.
:return: None
"""
libui.uiUninit()
| """
Python wrapper for libui.
"""
from . import libui
class App:
def __init__(self):
"""
Creates a new pylibui app.
"""
options = libui.uiInitOptions()
libui.uiInit(options)
def start(self):
"""
Starts the application main loop.
:return: None
"""
libui.uiMain()
def stop(self):
"""
Stops the application main loop.
:return: None
"""
libui.uiQuit()
def close(self):
"""
Closes the application and frees resources.
:return: None
"""
libui.uiUninit()
| Revert "Make App a context manager" | Revert "Make App a context manager"
| Python | mit | superzazu/pylibui,superzazu/pylibui,joaoventura/pylibui,joaoventura/pylibui | ---
+++
@@ -16,9 +16,6 @@
options = libui.uiInitOptions()
libui.uiInit(options)
- def __enter__(self):
- self.start()
-
def start(self):
"""
Starts the application main loop.
@@ -26,10 +23,6 @@
:return: None
"""
libui.uiMain()
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.stop()
- self.close()
def stop(self):
""" |
d8c21c06f93f479fbbabaf02e4993499bd147e94 | python/setup.py | python/setup.py | #!/usr/bin/env python
from setuptools import setup, Extension
import string
import sys
import os
sys.path.append('./test')
with open("README.md") as f:
long_description = f.read()
def cmd(line):
return os.popen(line).readlines()[0][:-1].split()
setup(name = 'sentencepiece',
author = 'Taku Kudo',
author_email='taku@google.com',
description = 'SentencePiece python wrapper',
long_description = long_description,
url = 'https://github.com/google/sentencepiece',
license = 'Apache',
platforms = 'Unix',
py_modules=['sentencepiece'],
ext_modules = [Extension('_sentencepiece',
sources=['sentencepiece_wrap.cxx'],
extra_compile_args=['-std=c++11'] +
cmd('pkg-config sentencepiece --cflags'),
extra_link_args=cmd('pkg-config sentencepiece --libs'))
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite = 'sentencepiece_test.suite')
| #!/usr/bin/env python
from setuptools import setup, Extension
import string
import subprocess
import sys
sys.path.append('./test')
with open("README.md") as f:
long_description = f.read()
def cmd(line):
try:
output = subprocess.check_output(line, shell=True)
except subprocess.CalledProcessError:
sys.stderr.write('Failed to find sentencepiece pkgconfig\n')
sys.exit(1)
return output.strip().split()
setup(name = 'sentencepiece',
author = 'Taku Kudo',
author_email='taku@google.com',
description = 'SentencePiece python wrapper',
long_description = long_description,
url = 'https://github.com/google/sentencepiece',
license = 'Apache',
platforms = 'Unix',
py_modules=['sentencepiece'],
ext_modules = [Extension('_sentencepiece',
sources=['sentencepiece_wrap.cxx'],
extra_compile_args=['-std=c++11'] +
cmd('pkg-config sentencepiece --cflags'),
extra_link_args=cmd('pkg-config sentencepiece --libs'))
],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite = 'sentencepiece_test.suite')
| Call sys.exit when pkg-config fails | Call sys.exit when pkg-config fails
| Python | apache-2.0 | google/sentencepiece,google/sentencepiece,google/sentencepiece,google/sentencepiece | ---
+++
@@ -2,8 +2,8 @@
from setuptools import setup, Extension
import string
+import subprocess
import sys
-import os
sys.path.append('./test')
@@ -11,7 +11,12 @@
long_description = f.read()
def cmd(line):
- return os.popen(line).readlines()[0][:-1].split()
+ try:
+ output = subprocess.check_output(line, shell=True)
+ except subprocess.CalledProcessError:
+ sys.stderr.write('Failed to find sentencepiece pkgconfig\n')
+ sys.exit(1)
+ return output.strip().split()
setup(name = 'sentencepiece',
author = 'Taku Kudo', |
d84f42d45bb16820fb0077c9f0f92ba88e24d5de | cabot/cabotapp/jenkins.py | cabot/cabotapp/jenkins.py | from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
| from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
if settings.JENKINS_USER:
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
else:
auth = None
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
| Fix Jenkins tests when no user is set | Fix Jenkins tests when no user is set
| Python | mit | Affirm/cabot,Affirm/cabot,Affirm/cabot,Affirm/cabot | ---
+++
@@ -8,7 +8,10 @@
logger = get_task_logger(__name__)
-auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
+if settings.JENKINS_USER:
+ auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
+else:
+ auth = None
def get_job_status(jobname): |
51059ae476ca0dd553220cf25c73a0eb14a099de | RecorderFactory.py | RecorderFactory.py | from Recorders import Recorder, PrintRecorder, FileRecorder
factory = dict([
('print', create_print_recorder),
('file', create_file_recorder)])
def create_recorder(config):
return factory[config.type](config.config)
def create_print_recorder(config):
return PrintRecorder(config)
def create_file_recorder(config):
return FileRecorder(config) | from Recorders import Recorder, PrintRecorder, FileRecorder
def create_print_recorder(config):
return PrintRecorder(config)
def create_file_recorder(config):
return FileRecorder(config)
recorderInitializers = dict([
('print', create_print_recorder),
('file', create_file_recorder)])
def create_recorder(config):
return recorderInitializers[config.type](config.config) | Define initializers before use them | Define initializers before use them
| Python | mit | hectortosa/py-temperature-recorder | ---
+++
@@ -1,14 +1,14 @@
from Recorders import Recorder, PrintRecorder, FileRecorder
-
-factory = dict([
- ('print', create_print_recorder),
- ('file', create_file_recorder)])
-
-def create_recorder(config):
- return factory[config.type](config.config)
def create_print_recorder(config):
return PrintRecorder(config)
def create_file_recorder(config):
return FileRecorder(config)
+
+recorderInitializers = dict([
+ ('print', create_print_recorder),
+ ('file', create_file_recorder)])
+
+def create_recorder(config):
+ return recorderInitializers[config.type](config.config) |
e40985c1ecba1529987ed9551210677ea93b9614 | test/unit/builtins/test_install.py | test/unit/builtins/test_install.py | from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
from bfg9000 import file_types
from bfg9000.path import Path, Root
class TestInstall(BuiltinTest):
def test_install_none(self):
self.assertEqual(self.builtin_dict['install'](), None)
def test_install_single(self):
exe = file_types.Executable(Path('exe', Root.srcdir), None)
self.assertEqual(self.builtin_dict['install'](exe), exe)
def test_install_multiple(self):
exe1 = file_types.Executable(Path('exe1', Root.srcdir), None)
exe2 = file_types.Executable(Path('exe2', Root.srcdir), None)
self.assertEqual(self.builtin_dict['install'](exe1, exe2),
(exe1, exe2))
def test_invalid(self):
phony = file_types.Phony('name')
self.assertRaises(TypeError, self.builtin_dict['install'], phony)
exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
self.assertRaises(ValueError, self.builtin_dict['install'], exe)
| import mock
from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
from bfg9000 import file_types
from bfg9000.path import Path, Root
class TestInstall(BuiltinTest):
def test_install_none(self):
self.assertEqual(self.builtin_dict['install'](), None)
def test_install_single(self):
exe = file_types.Executable(Path('exe', Root.srcdir), None)
self.assertEqual(self.builtin_dict['install'](exe), exe)
def test_install_multiple(self):
exe1 = file_types.Executable(Path('exe1', Root.srcdir), None)
exe2 = file_types.Executable(Path('exe2', Root.srcdir), None)
self.assertEqual(self.builtin_dict['install'](exe1, exe2),
(exe1, exe2))
def test_invalid(self):
phony = file_types.Phony('name')
self.assertRaises(TypeError, self.builtin_dict['install'], phony)
exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
self.assertRaises(ValueError, self.builtin_dict['install'], exe)
def test_cant_install(self):
with mock.patch('bfg9000.builtins.install.can_install',
return_value=False), \
mock.patch('warnings.warn') as m: # noqa
exe = file_types.Executable(Path('exe', Root.srcdir), None)
self.assertEqual(self.builtin_dict['install'](exe), exe)
m.assert_called_once()
| Add tests for unset installation dirs | Add tests for unset installation dirs
| Python | bsd-3-clause | jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000 | ---
+++
@@ -1,3 +1,5 @@
+import mock
+
from .common import BuiltinTest
from bfg9000.builtins import default, install # noqa
@@ -25,3 +27,11 @@
exe = file_types.Executable(Path('/path/to/exe', Root.absolute), None)
self.assertRaises(ValueError, self.builtin_dict['install'], exe)
+
+ def test_cant_install(self):
+ with mock.patch('bfg9000.builtins.install.can_install',
+ return_value=False), \
+ mock.patch('warnings.warn') as m: # noqa
+ exe = file_types.Executable(Path('exe', Root.srcdir), None)
+ self.assertEqual(self.builtin_dict['install'](exe), exe)
+ m.assert_called_once() |
260cd3b96df3a4746560db0032d7b6042c55d7fc | integration-test/976-fractional-pois.py | integration-test/976-fractional-pois.py | # https://www.openstreetmap.org/way/147689077
# Apple Store, SF
assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
| # https://www.openstreetmap.org/way/147689077
# Apple Store, SF
assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
assert_has_feature(
5, 9, 12, 'boundaries',
{ 'min_zoom': 0 , 'id': 8024,
'source': 'naturalearthdata.com',
'name': 'New Jersey - Pennsylvania' })
assert_has_feature(
5, 9, 12, 'roads',
{ 'min_zoom': 5 , 'id': 90,
'source': 'naturalearthdata.com' })
# There is no transit data from Natural Earth
assert_has_feature(
5, 9, 12, 'water',
{ 'min_zoom': 0 , 'id': 1144,
'source': 'naturalearthdata.com',
'name': 'John H. Kerr Reservoir' })
# https://www.openstreetmap.org/relation/224951
# https://www.openstreetmap.org/relation/61320
assert_has_feature(
9, 150, 192, 'boundaries',
{ 'min_zoom': 8, 'id': -224951,
'source': 'openstretmap.org',
'name': 'New Jersey - New York' })
assert_has_feature(
9, 150, 192, 'roads',
{ 'min_zoom': 8, 'sort_key': 381,
'source': 'openstretmap.org',
'kind': 'Major Road',
'network': 'US:NJ:Hudson' })
assert_has_feature(
9, 150, 192, 'transit',
{ 'min_zoom': 5, 'ref': '54-57',
'source': 'openstretmap.org',
'name': 'Vermonter' })
assert_has_feature(
9, 150, 192, 'water',
{ 'min_zoom': 0, 'id': 10613,
'source': 'openstretmapdata.com',
'kind': 'ocean',
'name': '' })
| Add tests for source and min_zoom | Add tests for source and min_zoom
| Python | mit | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | ---
+++
@@ -3,3 +3,51 @@
assert_has_feature(
15, 5242, 12664, 'pois',
{ 'id': 147689077, 'min_zoom': 15.68 })
+
+# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
+assert_has_feature(
+ 5, 9, 12, 'boundaries',
+ { 'min_zoom': 0 , 'id': 8024,
+ 'source': 'naturalearthdata.com',
+ 'name': 'New Jersey - Pennsylvania' })
+
+assert_has_feature(
+ 5, 9, 12, 'roads',
+ { 'min_zoom': 5 , 'id': 90,
+ 'source': 'naturalearthdata.com' })
+
+# There is no transit data from Natural Earth
+
+assert_has_feature(
+ 5, 9, 12, 'water',
+ { 'min_zoom': 0 , 'id': 1144,
+ 'source': 'naturalearthdata.com',
+ 'name': 'John H. Kerr Reservoir' })
+
+# https://www.openstreetmap.org/relation/224951
+# https://www.openstreetmap.org/relation/61320
+assert_has_feature(
+ 9, 150, 192, 'boundaries',
+ { 'min_zoom': 8, 'id': -224951,
+ 'source': 'openstretmap.org',
+ 'name': 'New Jersey - New York' })
+
+assert_has_feature(
+ 9, 150, 192, 'roads',
+ { 'min_zoom': 8, 'sort_key': 381,
+ 'source': 'openstretmap.org',
+ 'kind': 'Major Road',
+ 'network': 'US:NJ:Hudson' })
+
+assert_has_feature(
+ 9, 150, 192, 'transit',
+ { 'min_zoom': 5, 'ref': '54-57',
+ 'source': 'openstretmap.org',
+ 'name': 'Vermonter' })
+
+assert_has_feature(
+ 9, 150, 192, 'water',
+ { 'min_zoom': 0, 'id': 10613,
+ 'source': 'openstretmapdata.com',
+ 'kind': 'ocean',
+ 'name': '' }) |
633969d8d53d28db48bb2478820e90315329542c | tiddlywebplugins/prettyerror/instance.py | tiddlywebplugins/prettyerror/instance.py |
store_contents = {}
store_structure = {}
store_contents['_default_errors'] = [
'src/_errors/index.recipe',
]
store_structure['recipes'] = {}
store_structure['bags'] = {}
store_structure['recipes']['_errors'] = {
'desc': 'Pretty Errors Error Tiddlers',
'recipe': [
('_default_errors', ''),
('_errors', ''),
],
'policy': {
'read': [],
'write': ['R:ADMIN'],
'manage': ['R:ADMIN'],
'delete': ['R:ADMIN'],
'owner': 'administractor',
'write': ['R:ADMIN'],
},
}
store_structure['bags']['_default_errors'] = {
'desc': 'Default error tiddlers for Pretty Errors',
'policy': {
'read': [],
'write': ['NONE'],
'create': ['NONE'],
'delete': ['NONE'],
'manage': ['NONE'],
'accept': ['NONE'],
'owner': 'administrator',
},
}
store_structure['bags']['_errors'] = {
'desc': 'Override error tiddlers for Pretty Errors',
'policy': {
'read': [],
'write': ['R:ADMIN'],
'create': ['R:ADMIN'],
'delete': ['R:ADMIN'],
'manage': ['R:ADMIN'],
'accept': ['NONE'],
'owner': 'administrator',
},
}
instance_config = {
'system_plugins': ['tiddlywebplugins.prettyerror'],
}
| """
Establish the data structures representing the bags and recipes needed
by this plugin.
"""
store_contents = {}
store_structure = {}
store_contents['_default_errors'] = [
'src/_errors/index.recipe',
]
store_structure['recipes'] = {}
store_structure['bags'] = {}
store_structure['recipes']['_errors'] = {
'desc': 'Pretty Errors Error Tiddlers',
'recipe': [
('_default_errors', ''),
('_errors', ''),
],
'policy': {
'read': [],
'write': ['R:ADMIN'],
'manage': ['R:ADMIN'],
'delete': ['R:ADMIN'],
'owner': 'administractor',
'create': ['R:ADMIN'],
},
}
store_structure['bags']['_default_errors'] = {
'desc': 'Default error tiddlers for Pretty Errors',
'policy': {
'read': [],
'write': ['NONE'],
'create': ['NONE'],
'delete': ['NONE'],
'manage': ['NONE'],
'accept': ['NONE'],
'owner': 'administrator',
},
}
store_structure['bags']['_errors'] = {
'desc': 'Override error tiddlers for Pretty Errors',
'policy': {
'read': [],
'write': ['R:ADMIN'],
'create': ['R:ADMIN'],
'delete': ['R:ADMIN'],
'manage': ['R:ADMIN'],
'accept': ['NONE'],
'owner': 'administrator',
},
}
instance_config = {
'system_plugins': ['tiddlywebplugins.prettyerror'],
}
| Correct set policy on bag. | Correct set policy on bag.
Had been missing create constraint.
| Python | bsd-3-clause | tiddlyweb/tiddlywebplugins.prettyerror | ---
+++
@@ -1,3 +1,7 @@
+"""
+Establish the data structures representing the bags and recipes needed
+by this plugin.
+"""
store_contents = {}
store_structure = {}
@@ -21,7 +25,7 @@
'manage': ['R:ADMIN'],
'delete': ['R:ADMIN'],
'owner': 'administractor',
- 'write': ['R:ADMIN'],
+ 'create': ['R:ADMIN'],
},
}
|
fe37ef9248f8658296e6f465d380d639d6047a5d | aspen/server/diesel_.py | aspen/server/diesel_.py | import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
app = None # a diesel app instance
def bind(self):
self.app = wsgi.WSGIApplication( self.website
, self.website.address[1]
, self.website.address[0]
)
def start(self):
self.app.run()
def stop(self):
try:
self.app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
self.app.add_loop(diesel.Loop(loop))
| import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
diesel_app = None # a diesel diesel_app instance
def bind(self):
self.diesel_app = wsgi.WSGIApplication( self.website
, self.website.address[1]
, self.website.address[0]
)
def start(self):
self.diesel_app.run()
def stop(self):
try:
if self.diesel_app is not None:
self.diesel_app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
self.diesel_app.add_loop(diesel.Loop(loop))
| Reduce log spam from diesel. | Reduce log spam from diesel.
| Python | mit | gratipay/aspen.py,gratipay/aspen.py | ---
+++
@@ -5,20 +5,21 @@
class Engine(BaseEngine):
- app = None # a diesel app instance
+ diesel_app = None # a diesel diesel_app instance
def bind(self):
- self.app = wsgi.WSGIApplication( self.website
- , self.website.address[1]
- , self.website.address[0]
- )
+ self.diesel_app = wsgi.WSGIApplication( self.website
+ , self.website.address[1]
+ , self.website.address[0]
+ )
def start(self):
- self.app.run()
+ self.diesel_app.run()
def stop(self):
try:
- self.app.halt()
+ if self.diesel_app is not None:
+ self.diesel_app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
@@ -27,4 +28,4 @@
while True:
check_all()
diesel.sleep(0.5)
- self.app.add_loop(diesel.Loop(loop))
+ self.diesel_app.add_loop(diesel.Loop(loop)) |
cc08fcbb513224aafe6c04143a150d1019c032ef | setup_py2exe.py | setup_py2exe.py | #!/usr/bin/env python
# C:\Python27_32\python.exe setup_py2exe.py py2exe
from distutils.core import setup
from glob import glob
import os
import py2exe
from setup import SSLYZE_SETUP
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
plugin_data_path = 'plugins\\data\\trust_stores'
plugin_data_files = []
for file in os.listdir(plugin_data_path):
file = os.path.join(plugin_data_path, file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
data_files.append((plugin_data_path, plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy()
sslyze_setup_py2exe.update(
{
'console' : ['sslyze.py'],
'data_files' : data_files,
'zipfile' : None,
'options' : {'py2exe':{
#'skip_archive': True,
'bundle_files': 1,
}}
}
)
setup(**sslyze_setup_py2exe)
| #!/usr/bin/env python
# C:\Python27_32\python.exe setup_py2exe.py py2exe
from distutils.core import setup
from glob import glob
import os
import py2exe
from setup import SSLYZE_SETUP
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
plugin_data_files = []
for file in os.listdir('plugins\\data\\trust_stores'):
file = os.path.join('plugins\\data\\trust_stores', file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
data_files.append(('data\\trust_stores', plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy()
sslyze_setup_py2exe.update(
{
'console' : ['sslyze.py'],
'data_files' : data_files,
'zipfile' : None,
'options' : {'py2exe':{
#'skip_archive': True,
'bundle_files': 1,
}}
}
)
setup(**sslyze_setup_py2exe)
| Fix trust stores paths for py2exe builds | Fix trust stores paths for py2exe builds
| Python | agpl-3.0 | nabla-c0d3/sslyze | ---
+++
@@ -11,14 +11,13 @@
data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))]
# Trust Stores
-plugin_data_path = 'plugins\\data\\trust_stores'
plugin_data_files = []
-for file in os.listdir(plugin_data_path):
- file = os.path.join(plugin_data_path, file)
+for file in os.listdir('plugins\\data\\trust_stores'):
+ file = os.path.join('plugins\\data\\trust_stores', file)
if os.path.isfile(file): # skip directories
plugin_data_files.append( file)
-data_files.append((plugin_data_path, plugin_data_files))
+data_files.append(('data\\trust_stores', plugin_data_files))
sslyze_setup_py2exe = SSLYZE_SETUP.copy() |
013d0e3b2d8fdc212ae63f635a1e8def988672fa | tests/structures/test_sequences.py | tests/structures/test_sequences.py | import unittest
from ..utils import TranspileTestCase
class SequenceTests(TranspileTestCase):
def test_unpack_sequence(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b, c = x
print(a)
print(b)
print(c)
""")
@unittest.skip('Feature not yet implemented')
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b = x
print(a)
print(b)
""")
@unittest.skip('Feature not yet implemented')
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2]
a, b, c = x
print(a)
print(b)
print(c)
""")
| import unittest
from ..utils import TranspileTestCase
class SequenceTests(TranspileTestCase):
def test_unpack_sequence(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b, c = x
print(a)
print(b)
print(c)
""")
@unittest.expectedFailure
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b = x
print(a)
print(b)
""")
@unittest.expectedFailure
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2]
a, b, c = x
print(a)
print(b)
print(c)
""")
| Convert some skips to expected failures. | Convert some skips to expected failures.
| Python | bsd-3-clause | glasnt/voc,freakboy3742/voc,ASP1234/voc,ASP1234/voc,Felix5721/voc,pombredanne/voc,pombredanne/voc,Felix5721/voc,cflee/voc,cflee/voc,gEt-rIgHt-jR/voc,glasnt/voc,gEt-rIgHt-jR/voc,freakboy3742/voc | ---
+++
@@ -13,7 +13,7 @@
print(c)
""")
- @unittest.skip('Feature not yet implemented')
+ @unittest.expectedFailure
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
@@ -22,7 +22,7 @@
print(b)
""")
- @unittest.skip('Feature not yet implemented')
+ @unittest.expectedFailure
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2] |
9639cb7607d301abbc7ad6c8b22aa97e6a0eb5cb | tests/examples/test_examples_run.py | tests/examples/test_examples_run.py | import pytest
from os.path import abspath, basename, dirname, join
import subprocess
import glob
import sys
cwd = abspath(dirname(__file__))
examples_dir = join(cwd, "..", "..", "examples")
example_files = glob.glob("%s/*.py" % examples_dir)
@pytest.fixture(params=[pytest.param(f, marks=pytest.mark.xfail(reason="unreliable"))
if f.endswith("mountain_hydrostatic.py")
else f for f in example_files],
ids=lambda x: basename(x))
def example_file(request):
return abspath(request.param)
def test_example_runs(example_file, tmpdir, monkeypatch):
# This ensures that the test writes output in a temporary
# directory, rather than where pytest was run from.
monkeypatch.chdir(tmpdir)
subprocess.check_call([sys.executable, example_file, "--running-tests"])
| import pytest
from os.path import abspath, basename, dirname, join
import subprocess
import glob
import sys
cwd = abspath(dirname(__file__))
examples_dir = join(cwd, "..", "..", "examples")
example_files = glob.glob("%s/*.py" % examples_dir)
@pytest.fixture(params=glob.glob("%s/*.py" % examples_dir),
ids=lambda x: basename(x))
def example_file(request):
return abspath(request.param)
def test_example_runs(example_file, tmpdir, monkeypatch):
# This ensures that the test writes output in a temporary
# directory, rather than where pytest was run from.
monkeypatch.chdir(tmpdir)
subprocess.check_call([sys.executable, example_file, "--running-tests"])
| Remove pytest flag for xfails | Remove pytest flag for xfails
| Python | mit | firedrakeproject/gusto,firedrakeproject/dcore | ---
+++
@@ -10,9 +10,7 @@
example_files = glob.glob("%s/*.py" % examples_dir)
-@pytest.fixture(params=[pytest.param(f, marks=pytest.mark.xfail(reason="unreliable"))
- if f.endswith("mountain_hydrostatic.py")
- else f for f in example_files],
+@pytest.fixture(params=glob.glob("%s/*.py" % examples_dir),
ids=lambda x: basename(x))
def example_file(request):
return abspath(request.param) |
e783dfef25eb1e3b06064fb2bd125cef4f56ec08 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-gjslint
# License: MIT
#
"""This module exports the GJSLint plugin linter class."""
from SublimeLinter.lint import Linter
class GJSLint(Linter):
"""Provides an interface to the gjslint executable."""
language = ('javascript', 'html')
cmd = 'gjslint --nobeep --nosummary'
regex = r'^Line (?P<line>\d+), (?:(?P<error>E)|(?P<warning>W)):\d+: (?P<message>[^"]+(?P<near>"[^"]+")?)$'
comment_re = r'\s*/[/*]'
defaults = {
'--jslint_error:,+': '',
'--disable:,': '',
'--max_line_length:': None
}
inline_settings = 'max_line_length'
inline_overrides = 'disable'
tempfile_suffix = 'js'
selectors = {
'html': 'source.js.embedded.html'
}
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-gjslint
# License: MIT
#
"""This module exports the GJSLint plugin linter class."""
from SublimeLinter.lint import Linter
class GJSLint(Linter):
"""Provides an interface to the gjslint executable."""
syntax = ('javascript', 'html')
cmd = 'gjslint --nobeep --nosummary'
regex = r'^Line (?P<line>\d+), (?:(?P<error>E)|(?P<warning>W)):\d+: (?P<message>[^"]+(?P<near>"[^"]+")?)$'
comment_re = r'\s*/[/*]'
defaults = {
'--jslint_error:,+': '',
'--disable:,': '',
'--max_line_length:': None
}
inline_settings = 'max_line_length'
inline_overrides = 'disable'
tempfile_suffix = 'js'
selectors = {
'html': 'source.js.embedded.html'
}
| Change 'language' to 'syntax', that is more precise terminology. | Change 'language' to 'syntax', that is more precise terminology.
| Python | mit | SublimeLinter/SublimeLinter-gjslint | ---
+++
@@ -18,7 +18,7 @@
"""Provides an interface to the gjslint executable."""
- language = ('javascript', 'html')
+ syntax = ('javascript', 'html')
cmd = 'gjslint --nobeep --nosummary'
regex = r'^Line (?P<line>\d+), (?:(?P<error>E)|(?P<warning>W)):\d+: (?P<message>[^"]+(?P<near>"[^"]+")?)$'
comment_re = r'\s*/[/*]' |
ce6685d18492fe0787ded92939f52916e0d9cbaa | lc046_permutations.py | lc046_permutations.py | """Leetcode 46. Permutations
Medium
URL: https://leetcode.com/problems/permutations/
Given a collection of distinct integers, return all possible permutations.
Example:
Input: [1,2,3]
Output:
[
[1,2,3],
[1,3,2],
[2,1,3],
[2,3,1],
[3,1,2],
[3,2,1]
]
"""
class Solution(object):
def _backtrack(self, permutations, temps, nums):
if len(temps) == len(nums):
# One of permutations is completed.
permutations.append(temps[:])
else:
for i in range(len(nums)):
# If num i was used, skip it; otherwise add it to temps.
if nums[i] in temps:
continue
temps.append(nums[i])
# Apply DFS by recursion with backtracking.
self._backtrack(permutations, temps, nums)
temps.pop()
def permute(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
Time complexity: O(n!).
Space complexity: O(n).
"""
permutations = []
self._backtrack(permutations, [], nums)
return permutations
def main():
nums = [1, 2, 3]
print Solution().permute(nums)
if __name__ == '__main__':
main()
| """Leetcode 46. Permutations
Medium
URL: https://leetcode.com/problems/permutations/
Given a collection of distinct integers, return all possible permutations.
Example:
Input: [1,2,3]
Output:
[
[1,2,3],
[1,3,2],
[2,1,3],
[2,3,1],
[3,1,2],
[3,2,1]
]
"""
class Solution(object):
def _backtrack(self, permutations, temps, nums):
if len(temps) == len(nums):
# One of permutations is completed.
permutations.append(temps[:])
return None
for i in range(len(nums)):
# Constraint: If num i was used, skip it; otherwise add it to temps.
if nums[i] in temps:
continue
temps.append(nums[i])
# Apply DFS by recursion with backtracking.
self._backtrack(permutations, temps, nums)
temps.pop()
def permute(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
Time complexity: O(n!).
Space complexity: O(n).
"""
permutations = []
self._backtrack(permutations, [], nums)
return permutations
def main():
nums = [1, 2, 3]
print Solution().permute(nums)
if __name__ == '__main__':
main()
| Refactor adding temp to permutations | Refactor adding temp to permutations
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | ---
+++
@@ -23,16 +23,17 @@
if len(temps) == len(nums):
# One of permutations is completed.
permutations.append(temps[:])
- else:
- for i in range(len(nums)):
- # If num i was used, skip it; otherwise add it to temps.
- if nums[i] in temps:
- continue
- temps.append(nums[i])
+ return None
- # Apply DFS by recursion with backtracking.
- self._backtrack(permutations, temps, nums)
- temps.pop()
+ for i in range(len(nums)):
+ # Constraint: If num i was used, skip it; otherwise add it to temps.
+ if nums[i] in temps:
+ continue
+ temps.append(nums[i])
+
+ # Apply DFS by recursion with backtracking.
+ self._backtrack(permutations, temps, nums)
+ temps.pop()
def permute(self, nums):
""" |
6ba67b7090bbbb7a19e3a6c5623c1e63c0452428 | dmoj/executors/OCTAVE.py | dmoj/executors/OCTAVE.py | from .base_executor import ScriptExecutor
class Executor(ScriptExecutor):
ext = '.m'
name = 'OCTAVE'
command = 'octave'
address_grace = 131072
test_program = "disp(input('', 's'))"
fs = ['.*\.m', '/lib/', '/etc/nsswitch\.conf$', '/etc/passwd$', '/usr/share/', '/etc/fltk/']
def get_cmdline(self):
return [self.get_command(), '--no-gui', '--no-history', '--no-init-file', '--no-site-file',
'--no-window-system', '--norc', '--quiet', self._code]
| from .base_executor import ScriptExecutor
class Executor(ScriptExecutor):
ext = '.m'
name = 'OCTAVE'
command = 'octave'
address_grace = 131072
test_program = "disp(input('', 's'))"
fs = ['.*\.m', '/lib/', '/etc/nsswitch\.conf$', '/etc/passwd$', '/usr/share/', '/etc/fltk/']
def get_cmdline(self):
return [self.get_command(), '--no-gui', '--no-history', '--no-init-file', '--no-site-file',
'--no-window-system', '--norc', '--quiet', self._code]
@classmethod
def get_find_first_mapping(cls):
return {
'octave': ['octave-cli'],
} | Address execve protection fault in Octave autoconfig | Address execve protection fault in Octave autoconfig
`octave-cli` is what we really want
| Python | agpl-3.0 | DMOJ/judge,DMOJ/judge,DMOJ/judge | ---
+++
@@ -13,3 +13,9 @@
def get_cmdline(self):
return [self.get_command(), '--no-gui', '--no-history', '--no-init-file', '--no-site-file',
'--no-window-system', '--norc', '--quiet', self._code]
+
+ @classmethod
+ def get_find_first_mapping(cls):
+ return {
+ 'octave': ['octave-cli'],
+ } |
a962b38f6e36e8d06feee5fed5fcacaa69ef6761 | dnzo/admin/migrations.py | dnzo/admin/migrations.py | from google.appengine.ext import db
def update_context_indexes(start_key = None):
def fn(task):
task.contexts_index = " ".join(task.contexts)
task.put()
return True
from tasks_data.models import Task
return do_for_all(Task, start_key, fn, 50)
def do_for_all(model_klass, start_key, callback, max_records = 100):
objects = get_all_from_key(model_klass, start_key, max_records)
total = 0
updated = 0
last_key = None
for obj in objects:
if callback(obj):
updated += 1
total += 1
last_key = str(obj.key())
return (total, updated, last_key)
def get_all_from_key(model_klass, start_key, max_records):
params = {}
wheres = []
if start_key is not None:
wheres.append('__key__ > :key')
params['key'] = db.Key(start_key)
if len(wheres) > 0:
wheres = 'WHERE ' + ' AND '.join(wheres)
else:
wheres = ''
return model_klass.gql('%s ORDER BY __key__ ASC' % wheres, **params).fetch(max_records)
MIGRATIONS = [
{
'name': 'Update context indexes',
'slug': 'update_context_indexes',
'migration': update_context_indexes
}
] | from google.appengine.ext import db
def update_context_indexes(start_key = None):
def fn(task):
task.contexts_index = " ".join(task.contexts)
task.put()
return True
from tasks_data.models import Task
return do_for_all(Task, start_key, fn, 20)
def do_for_all(model_klass, start_key, callback, max_records = 100):
objects = get_all_from_key(model_klass, start_key, max_records)
total = 0
updated = 0
last_key = None
for obj in objects:
if callback(obj):
updated += 1
total += 1
last_key = str(obj.key())
return (total, updated, last_key)
def get_all_from_key(model_klass, start_key, max_records):
params = {}
wheres = []
if start_key is not None:
wheres.append('__key__ > :key')
params['key'] = db.Key(start_key)
if len(wheres) > 0:
wheres = 'WHERE ' + ' AND '.join(wheres)
else:
wheres = ''
return model_klass.gql('%s ORDER BY __key__ ASC' % wheres, **params).fetch(max_records)
MIGRATIONS = [
{
'name': 'Update context indexes',
'slug': 'update_context_indexes',
'migration': update_context_indexes
}
] | Change to 20 records at a time instead of 50. | Change to 20 records at a time instead of 50.
git-svn-id: 062a66634e56759c7c3cc44955c32d2ce0012d25@256 c02d1e6f-6a35-45f2-ab14-3b6f79a691ff
| Python | mit | taylorhughes/done-zo,taylorhughes/done-zo,taylorhughes/done-zo,taylorhughes/done-zo | ---
+++
@@ -8,7 +8,7 @@
return True
from tasks_data.models import Task
- return do_for_all(Task, start_key, fn, 50)
+ return do_for_all(Task, start_key, fn, 20)
def do_for_all(model_klass, start_key, callback, max_records = 100): |
011f7fbe66cc226cdd2be2e2eeef44df11733251 | scrapyard/kickass.py | scrapyard/kickass.py | import cache
import network
import scraper
import urllib
KICKASS_URL = 'http://kickass.so'
################################################################################
def movie(movie_info):
return __search('category:{0} imdb:{1}'.format('movies', movie_info['imdb_id'][2:]))
################################################################################
def episode(show_info, episode_info):
return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', show_info['title'], episode_info['season_index'], episode_info['episode_index']))
################################################################################
def __search(query):
magnet_infos = []
rss_data = network.rss_get_cached_optional(KICKASS_URL + '/usearch/{0}'.format(urllib.quote(query)), expiration=cache.HOUR, params={ 'field': 'seeders', 'sorder': 'desc', 'rss': '1' })
if rss_data:
for rss_item in rss_data.entries:
magnet_infos.append(scraper.Magnet(rss_item.torrent_magneturi, rss_item.title, int(rss_item.torrent_seeds), int(rss_item.torrent_peers)))
return magnet_infos
| import cache
import network
import scraper
import urllib
KICKASS_URL = 'http://kickass.so'
################################################################################
def movie(movie_info):
return __search('category:{0} imdb:{1}'.format('movies', movie_info['imdb_id'][2:]))
################################################################################
def episode(show_info, episode_info):
clean_title = show_info['title'].replace('?', '')
return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', clean_title, episode_info['season_index'], episode_info['episode_index']))
################################################################################
def __search(query):
magnet_infos = []
rss_data = network.rss_get_cached_optional(KICKASS_URL + '/usearch/{0}'.format(urllib.quote(query)), expiration=cache.HOUR, params={ 'field': 'seeders', 'sorder': 'desc', 'rss': '1' })
if rss_data:
for rss_item in rss_data.entries:
magnet_infos.append(scraper.Magnet(rss_item.torrent_magneturi, rss_item.title, int(rss_item.torrent_seeds), int(rss_item.torrent_peers)))
return magnet_infos
| Remove ? from show title when searching | Kickass: Remove ? from show title when searching
| Python | mit | sharkone/scrapyard | ---
+++
@@ -11,7 +11,8 @@
################################################################################
def episode(show_info, episode_info):
- return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', show_info['title'], episode_info['season_index'], episode_info['episode_index']))
+ clean_title = show_info['title'].replace('?', '')
+ return __search('category:{0} {1} season:{2} episode:{3}'.format('tv', clean_title, episode_info['season_index'], episode_info['episode_index']))
################################################################################
def __search(query): |
21bcb3105c9c3884f2a369a75408d91cdca5992e | tests/core/test_extensions.py | tests/core/test_extensions.py | from __future__ import unicode_literals, print_function, division, absolute_import
from nose.tools import raises
from openfisca_core.parameters import ParameterNode
from openfisca_country_template import CountryTaxBenefitSystem
tbs = CountryTaxBenefitSystem()
def test_extension_not_already_loaded():
assert tbs.get_variable('local_town_child_allowance') is None
def walk_and_count(node):
c = 0
for item_name, item in node.children.items():
if isinstance(item, ParameterNode):
c += walk_and_count(item)
else:
c += 1
return c
def test_load_extension():
assert len(tbs.variables) == 16
assert walk_and_count(tbs.parameters) == 8
tbs.load_extension('openfisca_extension_template')
assert len(tbs.variables) == 17
assert tbs.get_variable('local_town_child_allowance') is not None
assert walk_and_count(tbs.parameters) == 9
assert tbs.parameters.local_town.child_allowance.amount is not None
def test_unload_extensions():
tbs = CountryTaxBenefitSystem()
assert tbs.get_variable('local_town_child_allowance') is None
@raises(ValueError)
def test_failure_to_load_extension_when_directory_doesnt_exist():
tbs.load_extension('/this/is/not/a/real/path')
| from __future__ import unicode_literals, print_function, division, absolute_import
from nose.tools import raises
from openfisca_core.parameters import ParameterNode
from openfisca_country_template import CountryTaxBenefitSystem
tbs = CountryTaxBenefitSystem()
def test_extension_not_already_loaded():
assert tbs.get_variable('local_town_child_allowance') is None
def walk_and_count(node):
c = 0
for item_name, item in node.children.items():
if isinstance(item, ParameterNode):
c += walk_and_count(item)
else:
c += 1
return c
def test_load_extension():
assert len(tbs.variables) == 16
assert walk_and_count(tbs.parameters) == 8
tbs.load_extension('openfisca_extension_template')
assert len(tbs.variables) == 17
assert tbs.get_variable('local_town_child_allowance') is not None
assert walk_and_count(tbs.parameters) == 9
assert tbs.parameters('2016-01').local_town.child_allowance.amount == 100.0
assert tbs.parameters.local_town.child_allowance.amount is not None
def test_unload_extensions():
tbs = CountryTaxBenefitSystem()
assert tbs.get_variable('local_town_child_allowance') is None
@raises(ValueError)
def test_failure_to_load_extension_when_directory_doesnt_exist():
tbs.load_extension('/this/is/not/a/real/path')
| Test extension's parameter access for a given period | Test extension's parameter access for a given period
| Python | agpl-3.0 | openfisca/openfisca-core,openfisca/openfisca-core | ---
+++
@@ -31,6 +31,7 @@
assert tbs.get_variable('local_town_child_allowance') is not None
assert walk_and_count(tbs.parameters) == 9
+ assert tbs.parameters('2016-01').local_town.child_allowance.amount == 100.0
assert tbs.parameters.local_town.child_allowance.amount is not None
|
513244c067713a9b87f322c50be43643fdcca056 | test/api/test_api.py | test/api/test_api.py | import time
import subprocess
import pytest
import sys
SOURCE = "**"
if len(sys.argv) == 2:
SOURCE = str(sys.argv[1])
start = subprocess.Popen(['make', 'backend'])
time.sleep(5)
process = subprocess.run("pytest augur/datasources/{}/test_{}_routes.py".format(SOURCE, SOURCE), shell=True)
time.sleep(2)
subprocess.Popen(['make', 'backend-stop'])
sys.exit(process.returncode)
| import time
import subprocess
import os
import pytest
import sys
SOURCE = "**"
if len(sys.argv) == 2:
SOURCE = str(sys.argv[1])
FNULL = open(os.devnull, 'w')
start = subprocess.Popen(['augur', 'run'], stdout=FNULL, stderr=subprocess.STDOUT)
time.sleep(20)
process = subprocess.run("pytest augur/datasources/{}/test_{}_routes.py".format(SOURCE, SOURCE), shell=True)
time.sleep(5)
subprocess.Popen(['make', 'backend-stop'])
sys.exit(process.returncode)
| Refactor API testing script to be consistent with Travis build | Refactor API testing script to be consistent with Travis build
Signed-off-by: Carter Landis <ffc486ac0b21a34cfd7d1170183ed86b0f1b04a2@gmail.com>
| Python | mit | OSSHealth/ghdata,OSSHealth/ghdata,OSSHealth/ghdata | ---
+++
@@ -1,5 +1,6 @@
import time
import subprocess
+import os
import pytest
import sys
@@ -7,10 +8,12 @@
if len(sys.argv) == 2:
SOURCE = str(sys.argv[1])
-start = subprocess.Popen(['make', 'backend'])
+FNULL = open(os.devnull, 'w')
+
+start = subprocess.Popen(['augur', 'run'], stdout=FNULL, stderr=subprocess.STDOUT)
+time.sleep(20)
+process = subprocess.run("pytest augur/datasources/{}/test_{}_routes.py".format(SOURCE, SOURCE), shell=True)
time.sleep(5)
-process = subprocess.run("pytest augur/datasources/{}/test_{}_routes.py".format(SOURCE, SOURCE), shell=True)
-time.sleep(2)
subprocess.Popen(['make', 'backend-stop'])
sys.exit(process.returncode) |
c8f1d40f97e0e6be633acec8533f890287ef5200 | server/systeminfo.py | server/systeminfo.py | #!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split()[2]
uptime = uptime[0:len(uptime)-2] # remove the comma
return uptime
| #!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
| Correct get_uptime function. It didn't extract the uptime correctly. Now it does. | Correct get_uptime function. It didn't extract the uptime correctly. Now it does.
| Python | mit | juliendelplanque/raspirestmonitor | ---
+++
@@ -10,6 +10,6 @@
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
- uptime = output.decode("utf-8").split()[2]
- uptime = uptime[0:len(uptime)-2] # remove the comma
+ uptime = output.decode("utf-8").split(",")[0]
+ uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime |
aae2a298d0f5b6e94d4c7041b1cef0df424666a9 | bench_bin.py | bench_bin.py | import socket
import time
import random
import struct
NUM = 1024 * 1024 * 4
KEYS = ["test", "foobar", "zipzap"]
VALS = [32, 100, 82, 101, 5, 6, 42, 73]
BINARY_HEADER = struct.Struct("<BBHd")
BIN_TYPES = {"kv": 1, "c": 2, "ms": 3}
def format(key, type, val):
"Formats a binary message for statsite"
key = str(key)
key_len = len(key) + 1
type_num = BIN_TYPES[type]
header = BINARY_HEADER.pack(170, type_num, key_len, float(val))
mesg = header + key + "\0"
return mesg
METS = []
for x in xrange(NUM):
key = random.choice(KEYS)
val = random.choice(VALS)
METS.append(format(key, "c", val))
s = socket.socket()
s.connect(("localhost", 8125))
start = time.time()
current = 0
while current < len(METS):
msg = "".join(METS[current:current + 1024])
current += 1024
s.sendall(msg)
s.close()
end = time.time()
print NUM / (end - start), "ops/sec", (end - start), "sec"
| import socket
import time
import random
import struct
NUM = 1024 * 1024
KEYS = ["test", "foobar", "zipzap"]
VALS = [32, 100, 82, 101, 5, 6, 42, 73]
BINARY_HEADER = struct.Struct("<BBHd")
BIN_TYPES = {"kv": 1, "c": 2, "ms": 3}
def format(key, type, val):
"Formats a binary message for statsite"
key = str(key)
key_len = len(key) + 1
type_num = BIN_TYPES[type]
header = BINARY_HEADER.pack(170, type_num, key_len, float(val))
mesg = header + key + "\0"
return mesg
METS = []
for x in xrange(NUM):
key = random.choice(KEYS)
val = random.choice(VALS)
METS.append(format(key, "c", val))
s = socket.socket()
s.connect(("localhost", 8125))
start = time.time()
total = 0
while True:
current = 0
while current < len(METS):
msg = "".join(METS[current:current + 1024])
current += 1024
total += 1024
s.sendall(msg)
diff = time.time() - start
ops_s = total / diff
print "%0.2f sec\t - %.0f ops/sec" % (diff, ops_s)
| Change binary benchmark to loop forever too | Change binary benchmark to loop forever too
| Python | bsd-3-clause | u-s-p/statsite,statsite/statsite,Instagram/statsite,kuba--/statsite,zeedunk/statsite,nwangtw/statsite,theatrus/statsite,lazybios/statsite,zeedunk/statsite,Instagram/statsite,statsite/statsite,tsunli/statsite,kuba--/statsite,armon/statsite,nspragg/statsite,nspragg/statsite,sleepybishop/statsite,theatrus/statsite,u-s-p/statsite,zeedunk/statsite,theatrus/statsite,nspragg/statsite,nwangtw/statsite,jmptrader/statsite,jmptrader/statsite,armon/statsite,statsite/statsite,ualtinok/statsite,librato/statsite,librato/statsite,twitter-forks/statsite,sleepybishop/statsite,tsunli/statsite,zeedunk/statsite,ualtinok/statsite,twitter-forks/statsite,kuba--/statsite,tsunli/statsite,ualtinok/statsite,armon/statsite,ualtinok/statsite,jmptrader/statsite,u-s-p/statsite,remind101/statsite,sleepybishop/statsite,kuba--/statsite,sleepybishop/statsite,tsunli/statsite,remind101/statsite,jmptrader/statsite,librato/statsite,kuba--/statsite,nwangtw/statsite,theatrus/statsite,remind101/statsite,librato/statsite,u-s-p/statsite,lazybios/statsite,johnkeates/statsite,nspragg/statsite,statsite/statsite,nwangtw/statsite,twitter-forks/statsite,tsunli/statsite,librato/statsite,drawks/statsite,drawks/statsite,zeedunk/statsite,drawks/statsite,ualtinok/statsite,armon/statsite,u-s-p/statsite,Instagram/statsite,zeedunk/statsite,johnkeates/statsite,tsunli/statsite,remind101/statsite,bossjones/statsite,kuba--/statsite,twitter-forks/statsite,drawks/statsite,drawks/statsite,remind101/statsite,ualtinok/statsite,sleepybishop/statsite,nspragg/statsite,nwangtw/statsite,lazybios/statsite,nwangtw/statsite,johnkeates/statsite,bossjones/statsite,armon/statsite,u-s-p/statsite,johnkeates/statsite,bossjones/statsite,theatrus/statsite,twitter-forks/statsite,twitter-forks/statsite,lazybios/statsite,nspragg/statsite,jmptrader/statsite,johnkeates/statsite,remind101/statsite,bossjones/statsite,lazybios/statsite,statsite/statsite,jmptrader/statsite,librato/statsite,drawks/statsite | ---
+++
@@ -3,7 +3,7 @@
import random
import struct
-NUM = 1024 * 1024 * 4
+NUM = 1024 * 1024
KEYS = ["test", "foobar", "zipzap"]
VALS = [32, 100, 82, 101, 5, 6, 42, 73]
@@ -31,13 +31,16 @@
s.connect(("localhost", 8125))
start = time.time()
-current = 0
-while current < len(METS):
- msg = "".join(METS[current:current + 1024])
- current += 1024
- s.sendall(msg)
+total = 0
+while True:
+ current = 0
+ while current < len(METS):
+ msg = "".join(METS[current:current + 1024])
+ current += 1024
+ total += 1024
+ s.sendall(msg)
-s.close()
-end = time.time()
-print NUM / (end - start), "ops/sec", (end - start), "sec"
+ diff = time.time() - start
+ ops_s = total / diff
+ print "%0.2f sec\t - %.0f ops/sec" % (diff, ops_s)
|
be3824f150fc96f10a485b24a93290c6c87afc46 | sotu/github.py | sotu/github.py | import os
import urllib
import requests
GITHUB_BASE_URI = os.environ.get('GITHUB_BASE_URI', 'https://github.com')
GITHUB_API_BASE_URI = os.environ.get('GITHUB_API_BASE_URI', 'https://api.github.com')
GITHUB_CLIENT_ID = os.environ['GITHUB_CLIENT_ID']
GITHUB_CLIENT_SECRET = os.environ['GITHUB_CLIENT_SECRET']
GITHUB_CALLBACK_URI = 'https://sotu.cocoapods.org/callback'
def retrieve_access_token(code):
parameters = {
'client_id': GITHUB_CLIENT_ID,
'client_secret': GITHUB_CLIENT_SECRET,
'code': code,
}
headers = {
'Accept': 'application/json',
}
response = requests.post(GITHUB_BASE_URI + '/login/oauth/access_token?' + urllib.urlencode(parameters), headers=headers)
return response.json().get('access_token')
def retrieve_account(access_token):
return requests.get(GITHUB_BASE_URI + '/user?' + urllib.urlencode({'access_token': access_token})).json()
def retrieve_email(access_token):
emails = requests.get(GITHUB_BASE_URI + '/user/emails?' + urllib.urlencode({'access_token': access_token})).json()
primary = next(e for e in emails if e['primary'] is True)
return primary['email']
| import os
import urllib
import requests
GITHUB_BASE_URI = os.environ.get('GITHUB_BASE_URI', 'https://github.com')
GITHUB_API_BASE_URI = os.environ.get('GITHUB_API_BASE_URI', 'https://api.github.com')
GITHUB_CLIENT_ID = os.environ['GITHUB_CLIENT_ID']
GITHUB_CLIENT_SECRET = os.environ['GITHUB_CLIENT_SECRET']
GITHUB_CALLBACK_URI = 'https://sotu.cocoapods.org/callback'
def retrieve_access_token(code):
parameters = {
'client_id': GITHUB_CLIENT_ID,
'client_secret': GITHUB_CLIENT_SECRET,
'code': code,
}
headers = {
'Accept': 'application/json',
}
response = requests.post(GITHUB_BASE_URI + '/login/oauth/access_token?' + urllib.urlencode(parameters), headers=headers)
return response.json().get('access_token')
def retrieve_account(access_token):
return requests.get(GITHUB_API_BASE_URI + '/user?' + urllib.urlencode({'access_token': access_token})).json()
def retrieve_email(access_token):
emails = requests.get(GITHUB_API_BASE_URI + '/user/emails?' + urllib.urlencode({'access_token': access_token})).json()
primary = next(e for e in emails if e['primary'] is True)
return primary['email']
| Use correct URL for API | [GitHub] Use correct URL for API
| Python | mit | CocoaPods/sotu.cocoapods.org,CocoaPods/sotu.cocoapods.org | ---
+++
@@ -24,11 +24,11 @@
def retrieve_account(access_token):
- return requests.get(GITHUB_BASE_URI + '/user?' + urllib.urlencode({'access_token': access_token})).json()
+ return requests.get(GITHUB_API_BASE_URI + '/user?' + urllib.urlencode({'access_token': access_token})).json()
def retrieve_email(access_token):
- emails = requests.get(GITHUB_BASE_URI + '/user/emails?' + urllib.urlencode({'access_token': access_token})).json()
+ emails = requests.get(GITHUB_API_BASE_URI + '/user/emails?' + urllib.urlencode({'access_token': access_token})).json()
primary = next(e for e in emails if e['primary'] is True)
return primary['email']
|
552166a61e66f305b3729718361078558298883b | couchdb/tests/testutil.py | couchdb/tests/testutil.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import uuid
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
name = 'couchdb-python/' + uuid.uuid4().hex
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import random
import sys
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
# Find an unused database name
while True:
name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
if name not in self.temp_dbs:
break
print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
| Use a random number instead of uuid for temp database name. | Use a random number instead of uuid for temp database name.
| Python | bsd-3-clause | ssaavedra/couchdb-python,oliora/couchdb-python,hdmessaging/couchbase-mapping-python | ---
+++
@@ -6,7 +6,8 @@
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
-import uuid
+import random
+import sys
from couchdb import client
class TempDatabaseMixin(object):
@@ -25,7 +26,12 @@
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
- name = 'couchdb-python/' + uuid.uuid4().hex
+ # Find an unused database name
+ while True:
+ name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
+ if name not in self.temp_dbs:
+ break
+ print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db |
bd11c37a8669bdae2d4561483f50da0891b82627 | monsetup/detection/plugins/zookeeper.py | monsetup/detection/plugins/zookeeper.py | import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
if monsetup.detection.find_process_cmdline('zookeeper') is not None:
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
| import logging
import os
import yaml
import monsetup.agent_config
import monsetup.detection
log = logging.getLogger(__name__)
class Zookeeper(monsetup.detection.Plugin):
"""Detect Zookeeper daemons and setup configuration to monitor them.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
if monsetup.detection.find_process_cmdline('org.apache.zookeeper') is not None:
self.available = True
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monsetup.agent_config.Plugins()
# First watch the process
log.info("\tWatching the zookeeper process.")
config.merge(monsetup.detection.watch_process(['zookeeper']))
log.info("\tEnabling the zookeeper plugin")
with open(os.path.join(self.template_dir, 'conf.d/zk.yaml.example'), 'r') as zk_template:
zk_config = yaml.load(zk_template.read())
config['zk'] = zk_config
return config
def dependencies_installed(self):
# The current plugin just does a simple socket connection to zookeeper and
# parses the stat command
return True
| Fix detection of Zookeeper in monasca-setup | Fix detection of Zookeeper in monasca-setup
The Zookeeper detection plugin was looking for zookeeper in the process
command-line. This was producing false positives in the detection
process because storm uses the zookeeper library and it shows up
the command-line for storm.
Change-Id: I764a3064003beec55f0e589272855dadfa0997e7
| Python | bsd-3-clause | sapcc/monasca-agent,sapcc/monasca-agent,sapcc/monasca-agent | ---
+++
@@ -19,7 +19,7 @@
"""Run detection, set self.available True if the service is detected.
"""
- if monsetup.detection.find_process_cmdline('zookeeper') is not None:
+ if monsetup.detection.find_process_cmdline('org.apache.zookeeper') is not None:
self.available = True
def build_config(self): |
16703454a9334b6667a761bd52b0a5029e5976b2 | tests/test_api_views.py | tests/test_api_views.py | import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")
import django
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from api.webview.views import DocumentList
django.setup()
class APIViewTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_document_view(self):
view = DocumentList.as_view()
request = self.factory.get(
'/documents/'
)
response = view(request)
self.assertEqual(response.status_code, 200)
| import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")
import pytest
import django
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from api.webview.views import DocumentList
django.setup()
class APIViewTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
@pytest.mark.postgres
def test_document_view(self):
view = DocumentList.as_view()
request = self.factory.get(
'/documents/'
)
response = view(request)
self.assertEqual(response.status_code, 200)
| Add pytest mark database setup for test | Add pytest mark database setup for test
| Python | apache-2.0 | erinspace/scrapi,felliott/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,fabianvf/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,felliott/scrapi | ---
+++
@@ -1,6 +1,7 @@
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")
+import pytest
import django
from django.test import TestCase
from rest_framework.test import APIRequestFactory
@@ -15,6 +16,7 @@
def setUp(self):
self.factory = APIRequestFactory()
+ @pytest.mark.postgres
def test_document_view(self):
view = DocumentList.as_view()
request = self.factory.get( |
7cff4344538c59763560a9a86fda0f464f208b66 | nightreads/user_manager/user_service.py | nightreads/user_manager/user_service.py | from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
user.usertag.tags.add(*tags_objs)
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
| from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import UserTag
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.usertag.tags.clear()
user.usertag.tags.add(*tags_objs)
user.save()
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
UserTag.objects.create(user=user)
return user
| Clear existing tags before updating | Clear existing tags before updating
| Python | mit | avinassh/nightreads,avinassh/nightreads | ---
+++
@@ -6,7 +6,9 @@
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
- user.usertag.tags.add(*tags_objs)
+ if tags_objs:
+ user.usertag.tags.clear()
+ user.usertag.tags.add(*tags_objs)
user.save()
|
c31d1b9a50452ae1906eca9735cbfb9acd2580dd | src/parse_user_history/history_parser.py | src/parse_user_history/history_parser.py | import os.path
import json
import urlparse
ACCEPTED_FILETYPES = [
'json',
# 'csv'
]
class HistoryParser():
def __init__(self, path):
if not os.path.isfile(path):
raise Exception("File not found.")
if path.split(".")[-1] not in ACCEPTED_FILETYPES:
raise Exception("Filetype not accepted.")
self.path = path
def _parse(self):
with open(self.path) as data_file:
data = json.load(data_file)
return data
def countVisitedPages(self):
data = self._parse()
visited = {}
for entry in data:
url = urlparse.urlparse(entry["url"]).netloc
try:
visited[url] = visited[url] + entry["visitCount"]
except:
visited[url] = entry["visitCount"]
return visited
# hp = HistoryParser("../../example/andrei_history.json")
# hp.countVisitedPages()
| import os.path
import json
import urlparse
ACCEPTED_FILETYPES = [
'json',
# 'csv'
]
class HistoryParser():
def __init__(self, path):
if not os.path.isfile(path):
raise Exception("File not found.")
if path.split(".")[-1] not in ACCEPTED_FILETYPES:
raise Exception("Filetype not accepted.")
self.path = path
def _parse(self):
with open(self.path) as data_file:
data = json.load(data_file)
return data
def countVisitedPages(self):
data = self._parse()
visited = {}
for entry in data:
url = urlparse.urlparse(entry["url"]).netloc
if len(url.split(".")) > 2: # some links are actually browser addons addresses
try:
visited[url] = visited[url] + entry["visitCount"]
except:
visited[url] = entry["visitCount"]
return visited
| Prepend http to urls in user history | Prepend http to urls in user history
| Python | mit | piatra/ssl-project | ---
+++
@@ -26,11 +26,9 @@
visited = {}
for entry in data:
url = urlparse.urlparse(entry["url"]).netloc
- try:
- visited[url] = visited[url] + entry["visitCount"]
- except:
- visited[url] = entry["visitCount"]
+ if len(url.split(".")) > 2: # some links are actually browser addons addresses
+ try:
+ visited[url] = visited[url] + entry["visitCount"]
+ except:
+ visited[url] = entry["visitCount"]
return visited
-
-# hp = HistoryParser("../../example/andrei_history.json")
-# hp.countVisitedPages() |
1cafb39b6204010d3e17b059254af6042f4a9efc | apts/__init__.py | apts/__init__.py | import os
import shutil
import configparser
from .equipment import Equipment
from .observations import Observation
from .place import Place
from .weather import Weather
from .notify import Notify
from .catalogs import Catalogs
from .utils import Utils
user_config = os.path.expanduser("~") + "/.config/apts/apts.ini"
config = configparser.ConfigParser()
config.read(user_config)
# Load static fields from config
Weather.API_KEY = config['weather']['api_key']
Weather.API_URL = config['weather']['api_url']
Notify.EMAIL_ADDRESS = config['notification']['email_address']
Notify.EMAIL_PASSWORD = config['notification']['email_password']
__version__ = '0.2.9'
| import os
import shutil
import configparser
from .equipment import Equipment
from .observations import Observation
from .place import Place
from .weather import Weather
from .notify import Notify
from .catalogs import Catalogs
from .utils import Utils
# Default values for configuration values
DEFAULTS = {
'weather': {
'api_key': 'unknown',
'api_url': 'unknown'
},
'notification': {
'email_address': 'unknown',
'email_password': 'unknown'
}
}
# Init config with default values
config = configparser.ConfigParser(DEFAULTS)
# Read users configuration
user_config = os.path.expanduser("~") + "/.config/apts/apts.ini"
config.read(user_config)
# Load static fields from config
Weather.API_KEY = config['weather']['api_key']
Weather.API_URL = config['weather']['api_url']
Notify.EMAIL_ADDRESS = config['notification']['email_address']
Notify.EMAIL_PASSWORD = config['notification']['email_password']
__version__ = '0.2.9'
| Add default values for config | Add default values for config
| Python | apache-2.0 | pozar87/apts | ---
+++
@@ -10,9 +10,23 @@
from .catalogs import Catalogs
from .utils import Utils
+# Default values for configuration values
+DEFAULTS = {
+ 'weather': {
+ 'api_key': 'unknown',
+ 'api_url': 'unknown'
+ },
+ 'notification': {
+ 'email_address': 'unknown',
+ 'email_password': 'unknown'
+ }
+}
+
+# Init config with default values
+config = configparser.ConfigParser(DEFAULTS)
+
+# Read users configuration
user_config = os.path.expanduser("~") + "/.config/apts/apts.ini"
-
-config = configparser.ConfigParser()
config.read(user_config)
# Load static fields from config |
52648b65d5920e3c87cfbdd0d71d4d91302b3991 | calaccess_raw/admin/tracking.py | calaccess_raw/admin/tracking.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom administration panels for tracking models.
"""
from __future__ import unicode_literals
from django.contrib import admin
from calaccess_raw import models
from .base import BaseAdmin
@admin.register(models.RawDataVersion)
class RawDataVersionAdmin(BaseAdmin):
"""
Custom admin for the RawDataVersion model.
"""
list_display = (
"id",
"release_datetime",
"pretty_download_size",
"file_count",
"record_count",
"pretty_clean_size",
)
list_display_links = ('release_datetime',)
list_filter = ("release_datetime",)
@admin.register(models.RawDataFile)
class RawDataFileAdmin(BaseAdmin):
"""
Custom admin for the RawDataFile model.
"""
list_display = (
"id",
"version",
"file_name",
"download_records_count",
"clean_records_count",
"load_records_count",
"error_count"
)
list_display_links = ('id', 'file_name',)
list_filter = ("version__release_datetime",)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom administration panels for tracking models.
"""
from __future__ import unicode_literals
from django.contrib import admin
from calaccess_raw import models
from .base import BaseAdmin
@admin.register(models.RawDataVersion)
class RawDataVersionAdmin(BaseAdmin):
"""
Custom admin for the RawDataVersion model.
"""
list_display = (
"id",
"release_datetime",
"pretty_download_size",
"download_file_count",
"download_record_count",
"clean_file_count",
"clean_record_count",
"pretty_clean_size",
)
list_display_links = ('release_datetime',)
list_filter = ("release_datetime",)
@admin.register(models.RawDataFile)
class RawDataFileAdmin(BaseAdmin):
"""
Custom admin for the RawDataFile model.
"""
list_display = (
"id",
"version",
"file_name",
"download_records_count",
"clean_records_count",
"load_records_count",
"error_count"
)
list_display_links = ('id', 'file_name',)
list_filter = ("version__release_datetime",)
| Update count fields on RawDataVersionAdmin | Update count fields on RawDataVersionAdmin
| Python | mit | california-civic-data-coalition/django-calaccess-raw-data | ---
+++
@@ -18,8 +18,10 @@
"id",
"release_datetime",
"pretty_download_size",
- "file_count",
- "record_count",
+ "download_file_count",
+ "download_record_count",
+ "clean_file_count",
+ "clean_record_count",
"pretty_clean_size",
)
list_display_links = ('release_datetime',) |
d4c0a8d0077439adb1e074e6f6e1a1e8b751a804 | serfnode/handler/config.py | serfnode/handler/config.py | import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf['serfnode']
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP') or ip
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
| import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode', {})
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP') or ip
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
| Fix handling of empty file | Fix handling of empty file | Python | mit | waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode | ---
+++
@@ -8,7 +8,7 @@
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
- return conf['serfnode']
+ return conf.get('serfnode', {})
yml = read_serfnode_yml() |
e033fa49673cdc1b682edcc2aaf1e140a73ab1b4 | src/wirecloud/platform/context/models.py | src/wirecloud/platform/context/models.py | # -*- coding: utf-8 -*-
# Copyright 2013 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.utils.translation import gettext_lazy as _
class Constant(models.Model):
scope = models.CharField(_('Scope'), max_length=20, null=False, blank=False)
concept = models.CharField(_('Concept'), max_length=255, null=False, blank=False)
value = models.CharField(_('Value'), max_length=256)
class Meta:
unique_together = (('scope', 'concept'),)
app_label = 'platform'
db_table = 'wirecloud_constant'
def __unicode__(self):
return self.concept.concept
| # -*- coding: utf-8 -*-
# Copyright 2013 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.utils.translation import gettext_lazy as _
class Constant(models.Model):
concept = models.CharField(_('Concept'), max_length=255, unique=True, null=False, blank=False)
value = models.CharField(_('Value'), max_length=256)
class Meta:
app_label = 'platform'
db_table = 'wirecloud_constant'
def __unicode__(self):
return self.concept.concept
| Remove scope attribute from Constant | Remove scope attribute from Constant
| Python | agpl-3.0 | rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud | ---
+++
@@ -24,12 +24,10 @@
class Constant(models.Model):
- scope = models.CharField(_('Scope'), max_length=20, null=False, blank=False)
- concept = models.CharField(_('Concept'), max_length=255, null=False, blank=False)
+ concept = models.CharField(_('Concept'), max_length=255, unique=True, null=False, blank=False)
value = models.CharField(_('Value'), max_length=256)
class Meta:
- unique_together = (('scope', 'concept'),)
app_label = 'platform'
db_table = 'wirecloud_constant'
|
53cae8a7d95832a0f95a537468552254028a0668 | tests/system/test_auth.py | tests/system/test_auth.py | import pytest
from inbox.models.session import session_scope
from client import InboxTestClient
from conftest import (timeout_loop, credentials, create_account, API_BASE)
@timeout_loop('sync_start')
def wait_for_sync_start(client):
return True if client.messages.first() else False
@timeout_loop('auth')
def wait_for_auth(client):
namespaces = client.namespaces.all()
if len(namespaces):
client.email_address = namespaces[0]['email_address']
client.provider = namespaces[0]['provider']
return True
return False
@pytest.mark.parametrize("account_credentials", credentials)
def test_account_auth(account_credentials):
email, password = account_credentials
with session_scope() as db_session:
create_account(db_session, email, password)
client = InboxTestClient(email, API_BASE)
wait_for_auth(client)
# wait for sync to start. tests rely on things setup at beginning
# of sync (e.g. folder hierarchy)
wait_for_sync_start(client)
| import pytest
from inbox.models.session import session_scope
from client import InboxTestClient
from conftest import (timeout_loop, credentials, create_account, API_BASE)
from accounts import broken_credentials
@timeout_loop('sync_start')
def wait_for_sync_start(client):
return True if client.messages.first() else False
@timeout_loop('auth')
def wait_for_auth(client):
namespaces = client.namespaces.all()
if len(namespaces):
client.email_address = namespaces[0]['email_address']
client.provider = namespaces[0]['provider']
return True
return False
@pytest.mark.parametrize("account_credentials", credentials)
def test_account_auth(account_credentials):
email, password = account_credentials
with session_scope() as db_session:
create_account(db_session, email, password)
client = InboxTestClient(email, API_BASE)
wait_for_auth(client)
# wait for sync to start. tests rely on things setup at beginning
# of sync (e.g. folder hierarchy)
wait_for_sync_start(client)
errors = __import__('inbox.basicauth', fromlist=['basicauth'])
def test_account_create_should_fail():
"""Test that creation fails with appropriate errors, as defined in
the broken_credentials list.
Credentials have the format:
({email, password}, error_type)
e.g.
({'user': 'foo@foo.com', 'password': 'pass'}, 'ConfigError')
"""
credentials = [((c['user'], c['password']), e)
for (c, e) in broken_credentials]
for ((email, password), error) in credentials:
error_obj = getattr(errors, error)
with session_scope() as db_session:
with pytest.raises(error_obj):
create_account(db_session, email, password)
| Add a system test to check for expected broken accounts | Add a system test to check for expected broken accounts
Summary:
This is the system test for D765 and finishes up the sync engine side
of T495 - checking for All Mail folder and failing gracefully if it's
absent.
This test specifically adds another check in `test_auth` based on a
new list of live, bad credentials in accounts.py. It's purposefully
general, so other known bad credentials could be added if we want to
test for more error cases.
There are two changes required in Jenkins to make this run:
* Add another string parameter broken_accounts to identify
broken test accounts.
* In the build script, after the line:
`echo "credentials = $test_accounts" > tests/system/accounts.py`
add:
`echo "broken_credentials = $broken_accounts" >> tests/system/accounts.py`
It looks like I can make the changes myself but I'd appreciate input
from folks more familiar with the jenkins setup.
Test Plan: If you want to test locally, ping me for some broken credentials you can use.
Reviewers: emfree
Reviewed By: emfree
Subscribers: spang
Differential Revision: https://review.inboxapp.com/D773
| Python | agpl-3.0 | gale320/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,nylas/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,jobscore/sync-engine,jobscore/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,closeio/nylas,nylas/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,PriviPK/privipk-sync-engine,gale320/sync-engine,Eagles2F/sync-engine,closeio/nylas,ErinCall/sync-engine,jobscore/sync-engine,gale320/sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,nylas/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,gale320/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine | ---
+++
@@ -3,6 +3,7 @@
from inbox.models.session import session_scope
from client import InboxTestClient
from conftest import (timeout_loop, credentials, create_account, API_BASE)
+from accounts import broken_credentials
@timeout_loop('sync_start')
@@ -33,3 +34,24 @@
# wait for sync to start. tests rely on things setup at beginning
# of sync (e.g. folder hierarchy)
wait_for_sync_start(client)
+
+
+errors = __import__('inbox.basicauth', fromlist=['basicauth'])
+
+
+def test_account_create_should_fail():
+ """Test that creation fails with appropriate errors, as defined in
+ the broken_credentials list.
+ Credentials have the format:
+ ({email, password}, error_type)
+ e.g.
+ ({'user': 'foo@foo.com', 'password': 'pass'}, 'ConfigError')
+ """
+ credentials = [((c['user'], c['password']), e)
+ for (c, e) in broken_credentials]
+
+ for ((email, password), error) in credentials:
+ error_obj = getattr(errors, error)
+ with session_scope() as db_session:
+ with pytest.raises(error_obj):
+ create_account(db_session, email, password) |
8d9b163b73da8c498793cce2f22f6a7cbe524d94 | bucketcache/__init__.py | bucketcache/__init__.py | from __future__ import absolute_import, division, print_function
from .backends import *
from .buckets import *
from .config import *
from .exceptions import *
from .keymakers import *
from .log import logger, logger_config
from .utilities import *
__all__ = (backends.__all__ + buckets.__all__ + config.__all__ +
exceptions.__all__ + keymakers.__all__ + utilities.__all__)
__author__ = 'Frazer McLean <frazer@frazermclean.co.uk>'
__version__ = '0.12.0'
__license__ = 'MIT'
__description__ = 'Versatile persisent file cache.'
| from __future__ import absolute_import, division, print_function
from .backends import *
from .buckets import *
from .config import *
from .exceptions import *
from .keymakers import *
from .log import logger, logger_config
from .utilities import *
__all__ = (backends.__all__ + buckets.__all__ + config.__all__ +
exceptions.__all__ + keymakers.__all__ + utilities.__all__)
__author__ = 'Frazer McLean <frazer@frazermclean.co.uk>'
__version__ = '0.12.1'
__license__ = 'MIT'
__description__ = 'Versatile persisent file cache.'
| BUmp version number to 0.12.1 | BUmp version number to 0.12.1
| Python | mit | RazerM/bucketcache | ---
+++
@@ -12,6 +12,6 @@
exceptions.__all__ + keymakers.__all__ + utilities.__all__)
__author__ = 'Frazer McLean <frazer@frazermclean.co.uk>'
-__version__ = '0.12.0'
+__version__ = '0.12.1'
__license__ = 'MIT'
__description__ = 'Versatile persisent file cache.' |
b777c8268ec661539f868937478c3bbc204f2fb3 | tests/test_RI_response.py | tests/test_RI_response.py | from addons import *
from utils import *
tdir = 'Response-Theory'
def test_beta(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/beta')
def test_CPHF(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/CPHF')
def test_helper_CPHF(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/helper_CPHF')
def test_TDHF(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/TDHF')
def test_vcd(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/vcd')
def test_polar_cc(workspace):
exe_py(workspace, tdir, 'Coupled-Cluster/RHF/polar')
def test_optrot_cc(workspace):
exe_py(workspace, tdir, 'Coupled-Cluster/RHF/optrot')
| from addons import *
from utils import *
tdir = 'Response-Theory'
def test_beta(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/beta')
def test_CPHF(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/CPHF')
def test_helper_CPHF(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/helper_CPHF')
def test_TDHF(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/TDHF')
@using_psi4_python_integral_deriv
def test_vcd(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/vcd')
def test_polar_cc(workspace):
exe_py(workspace, tdir, 'Coupled-Cluster/RHF/polar')
def test_optrot_cc(workspace):
exe_py(workspace, tdir, 'Coupled-Cluster/RHF/optrot')
| Use integral derivative decorator for test | Use integral derivative decorator for test
| Python | bsd-3-clause | dsirianni/psi4numpy,psi4/psi4numpy | ---
+++
@@ -21,6 +21,7 @@
exe_py(workspace, tdir, 'Self-Consistent-Field/TDHF')
+@using_psi4_python_integral_deriv
def test_vcd(workspace):
exe_py(workspace, tdir, 'Self-Consistent-Field/vcd')
|
f5227b573588f6258b22c15583dc1d0ee259a9db | aiopg/__init__.py | aiopg/__init__.py | import re
import sys
from collections import namedtuple
from .connection import connect, Connection, TIMEOUT as DEFAULT_TIMEOUT
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
'version', 'version_info', 'DEFAULT_TIMEOUT')
__version__ = '0.4.0a0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool, DEFAULT_TIMEOUT)
| import re
import sys
from collections import namedtuple
from .connection import connect, Connection, TIMEOUT as DEFAULT_TIMEOUT
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
'version', 'version_info', 'DEFAULT_TIMEOUT')
__version__ = '0.4.0a0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'c': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool, DEFAULT_TIMEOUT)
| Make version format PEP 440 compatible | Make version format PEP 440 compatible
| Python | bsd-2-clause | nerandell/aiopg,hyzhak/aiopg,luhn/aiopg,aio-libs/aiopg,eirnym/aiopg,graingert/aiopg | ---
+++
@@ -27,7 +27,7 @@
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
- levels = {'rc': 'candidate',
+ levels = {'c': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'} |
78cfcc7520bc2b2006f22ac4ef4fb432770c835c | bootstrap.py | bootstrap.py | #!/usr/bin/env python
"""
Bootstrap and serve your application. This file also serves to not make your
application completely reliant upon DotCloud's hosting service.
If you're in a development environment, envoke the script with:
$ python bootstrap.py
In a production environment, your application can be run with the `gevent`
Python library:
$ python bootstrap.py --gevent
"""
import argparse
from app import create_app
def parse_arguments():
"""Parse any additional arguments that may be passed to `bootstrap.py`."""
parser = argparse.ArgumentParser()
parser.add_argument('--gevent', action='store_true',
help="Run gevent's production server.")
args = parser.parse_args()
return args.gevent
def serve_app(gevent_environment):
"""
Serve your application. If `dev_environment` is true, then the
application will be served using gevent's WSGIServer.
"""
app = create_app()
if gevent_environment:
from gevent.wsgi import WSGIServer
# The port should probably be set to 80.
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
else:
app.run(debug=True)
def main():
dev_environment = parse_arguments()
serve_app(dev_environment)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
Bootstrap and serve your application. This file also serves to not make your
application completely reliant upon DotCloud's hosting service.
If you're in a development environment, envoke the script with:
$ python bootstrap.py
In a production environment, your application can be run with the `gevent`
Python library:
$ python bootstrap.py --gevent
"""
import os
import argparse
from app import create_app
def parse_arguments():
"""Parse any additional arguments that may be passed to `bootstrap.py`."""
parser = argparse.ArgumentParser()
parser.add_argument('--gevent', action='store_true',
help="Run gevent's production server.")
args = parser.parse_args()
return args.gevent
def serve_app(gevent_environment):
"""
Serve your application. If `dev_environment` is true, then the
application will be served using gevent's WSGIServer.
"""
app = create_app()
if gevent_environment:
from gevent.wsgi import WSGIServer
# Get the port if on heroku's environment.
port = int(os.environ.get('PORT', 5000))
http_server = WSGIServer(('', port), app)
http_server.serve_forever()
else:
app.run(debug=True)
def main():
dev_environment = parse_arguments()
serve_app(dev_environment)
if __name__ == '__main__':
main()
| Allow Flask to get port from heroku environment | Allow Flask to get port from heroku environment
| Python | mit | Leonnash21/flask_heroku,Leonnash21/flask_heroku,QueryControl/querycontrol,Leonnash21/flask_heroku,bryanyang0528/ubike_api,pmrowla/goonbcs,QueryControl/querycontrol,Leonnash21/flask_heroku,bryanyang0528/ubike_api,Leonnash21/flask_heroku,pmrowla/goonbcs,QueryControl/querycontrol,bryanyang0528/ubike_api | ---
+++
@@ -13,6 +13,7 @@
"""
+import os
import argparse
from app import create_app
@@ -34,8 +35,9 @@
app = create_app()
if gevent_environment:
from gevent.wsgi import WSGIServer
- # The port should probably be set to 80.
- http_server = WSGIServer(('', 5000), app)
+ # Get the port if on heroku's environment.
+ port = int(os.environ.get('PORT', 5000))
+ http_server = WSGIServer(('', port), app)
http_server.serve_forever()
else:
app.run(debug=True) |
429ffd7f41dda00f662167f179aea73b7d018807 | pi_setup/system.py | pi_setup/system.py | #!/usr/bin/env python
import subprocess
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import subprocess
from utils.installation import OptionalInstall
def main():
subprocess.call(["apt-get", "update"])
subprocess.call(["apt-get", "-y", "upgrade"])
subprocess.call(["apt-get", "-y", "install", "python-dev"])
subprocess.call(["apt-get", "-y", "install", "python-pip"])
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
optional_install_upstart()
def optional_install_upstart():
prompt_txt = "Want to install Upstart (Y/N): "
skip_txt = "Skipping Upstart server..."
def action():
subprocess.call(["apt-get", "-y", "install", "upstart"])
OptionalInstall(prompt_txt, skip_txt, action).run()
if __name__ == '__main__':
main()
| Make upstart an optional install | Make upstart an optional install
| Python | mit | projectweekend/Pi-Setup,projectweekend/Pi-Setup | ---
+++
@@ -1,5 +1,6 @@
#!/usr/bin/env python
import subprocess
+from utils.installation import OptionalInstall
def main():
@@ -10,6 +11,17 @@
subprocess.call(["apt-get", "-y", "install", "avahi-daemon"])
subprocess.call(["apt-get", "-y", "install", "rpi-update"])
subprocess.call(["pip", "install", "virtualenv"])
+ optional_install_upstart()
+
+
+def optional_install_upstart():
+ prompt_txt = "Want to install Upstart (Y/N): "
+ skip_txt = "Skipping Upstart server..."
+
+ def action():
+ subprocess.call(["apt-get", "-y", "install", "upstart"])
+
+ OptionalInstall(prompt_txt, skip_txt, action).run()
if __name__ == '__main__': |
6d6394dd9917bdc97988818d7eb358b7d92d53c4 | pythonforandroid/recipes/android/src/setup.py | pythonforandroid/recipes/android/src/setup.py | from distutils.core import setup, Extension
import os
library_dirs = ['libs/' + os.environ['ARCH']]
lib_dict = {
'pygame': ['sdl'],
'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf']
}
sdl_libs = lib_dict[os.environ['BOOTSTRAP']]
renpy_sound = Extension('android._android_sound',
['android/_android_sound.c', 'android/_android_sound_jni.c', ],
libraries=sdl_libs + ['log'],
library_dirs=library_dirs)
modules = [Extension('android._android',
['android/_android.c', 'android/_android_jni.c'],
libraries=sdl_libs + ['log'],
library_dirs=library_dirs),
Extension('android._android_billing',
['android/_android_billing.c', 'android/_android_billing_jni.c'],
libraries=['log'],
library_dirs=library_dirs)]
if int(os.environ['IS_PYGAME']):
modules.append(renpy_sound)
setup(name='android',
version='1.0',
packages=['android'],
package_dir={'android': 'android'},
ext_modules=modules
)
| from distutils.core import setup, Extension
import os
library_dirs = ['libs/' + os.environ['ARCH']]
lib_dict = {
'pygame': ['sdl'],
'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf']
}
sdl_libs = lib_dict.get(os.environ['BOOTSTRAP'], [])
renpy_sound = Extension('android._android_sound',
['android/_android_sound.c', 'android/_android_sound_jni.c', ],
libraries=sdl_libs + ['log'],
library_dirs=library_dirs)
modules = [Extension('android._android',
['android/_android.c', 'android/_android_jni.c'],
libraries=sdl_libs + ['log'],
library_dirs=library_dirs),
Extension('android._android_billing',
['android/_android_billing.c', 'android/_android_billing_jni.c'],
libraries=['log'],
library_dirs=library_dirs)]
if int(os.environ['IS_PYGAME']):
modules.append(renpy_sound)
setup(name='android',
version='1.0',
packages=['android'],
package_dir={'android': 'android'},
ext_modules=modules
)
| Fix compile error of recipe "android" for non-sdl bootstrap build | Fix compile error of recipe "android" for non-sdl bootstrap build
| Python | mit | rnixx/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,kivy/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,kivy/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,germn/python-for-android,kivy/python-for-android,rnixx/python-for-android,germn/python-for-android,germn/python-for-android,kivy/python-for-android,kronenpj/python-for-android,germn/python-for-android,germn/python-for-android,kivy/python-for-android,germn/python-for-android | ---
+++
@@ -6,7 +6,7 @@
'pygame': ['sdl'],
'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf']
}
-sdl_libs = lib_dict[os.environ['BOOTSTRAP']]
+sdl_libs = lib_dict.get(os.environ['BOOTSTRAP'], [])
renpy_sound = Extension('android._android_sound',
['android/_android_sound.c', 'android/_android_sound_jni.c', ], |
b6ab579fa65f816704142716fbd68645ac5f2ff8 | zenaida/contrib/feedback/models.py | zenaida/contrib/feedback/models.py | from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
| from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
class Meta:
ordering = ["-timestamp"]
| Order feedback items by their timestamp. | Order feedback items by their timestamp.
| Python | bsd-3-clause | littleweaver/django-zenaida,littleweaver/django-zenaida,littleweaver/django-zenaida,littleweaver/django-zenaida | ---
+++
@@ -30,3 +30,6 @@
username=self.user.get_full_name(),
path = self.request_path
)
+
+ class Meta:
+ ordering = ["-timestamp"] |
9ac802f89f8c00dc3f0c4534d234c1dd1dc7a203 | c3py/chart_component.py | c3py/chart_component.py | class ChartComponent(object):
def __init__(self):
self.config = {}
def __string_wrap__(self, string_to_wrap):
return "'" + string_to_wrap + "'"
class ChartComponentDict(ChartComponent):
def __init__(self):
super(ChartComponentDict, self).__init__()
class ChartComponentList(ChartComponent):
def __init__(self):
super(ChartComponentList, self).__init__()
self.config = [] | class ChartComponent(object):
def __init__(self):
self.config = {}
def __string_wrap__(self, string_to_wrap):
return "'" + str(string_to_wrap) + "'"
class ChartComponentDict(ChartComponent):
def __init__(self):
super(ChartComponentDict, self).__init__()
class ChartComponentList(ChartComponent):
def __init__(self):
super(ChartComponentList, self).__init__()
self.config = [] | Enforce string before wrapping in quotation marks | Enforce string before wrapping in quotation marks
| Python | mit | h0s/c3py,harshil-shah/c3py,harshil-shah/c3py,h0s/c3py | ---
+++
@@ -6,7 +6,7 @@
def __string_wrap__(self, string_to_wrap):
- return "'" + string_to_wrap + "'"
+ return "'" + str(string_to_wrap) + "'"
class ChartComponentDict(ChartComponent): |
fccf1d6562e8d5e1349d1f0826993ec799a5fc07 | app/timetables/tests/test_models.py | app/timetables/tests/test_models.py | from django.test import TestCase
from django.db import IntegrityError
from app.timetables.models import Weekday, Meal
class WeekdayTest(TestCase):
"""Tests the Weekday model."""
def setUp(self):
Weekday.objects.create(name='monday')
def test_weekday_name_should_be_capitalized_on_save(self):
day = Weekday.objects.get(name__iexact='monday')
self.assertEqual(day.name, 'Monday')
def test_duplicate_weekday_name_cannot_be_saved(self):
day = Weekday(name='Monday')
self.assertRaises(IntegrityError, day.save)
class MealTest(TestCase):
"""Tests the Meal model."""
def setUp(self):
Meal.objects.create(
name='breakfast', start_time='21:30:05', end_time='22:30:05'
)
def test_meal_name_should_be_capitalized_on_save(self):
meal = Meal.objects.get(name__iexact="breakfast")
self.assertEqual(meal.name, 'Breakfast')
def test_duplicate_meal_name_cannot_be_saved(self):
meal = Meal(name='Breakfast')
self.assertRaises(IntegrityError, meal.save)
| from datetime import datetime
from django.test import TestCase
from django.db import IntegrityError
from app.timetables.models import Weekday, Meal
class WeekdayTest(TestCase):
"""Tests the Weekday model."""
def setUp(self):
Weekday.objects.create(name='monday')
def test_weekday_name_should_be_capitalized_on_save(self):
day = Weekday.objects.get(name__iexact='monday')
self.assertEqual(day.name, 'Monday')
def test_duplicate_weekday_name_cannot_be_saved(self):
day = Weekday(name='Monday')
self.assertRaises(IntegrityError, day.save)
class MealTest(TestCase):
"""Tests the Meal model."""
def setUp(self):
Meal.objects.create(
name='breakfast',
start_time=datetime.strptime('21:30:05', '%H:%M:%S').time(),
end_time=datetime.strptime('22:30:05', '%H:%M:%S').time()
)
def test_meal_name_should_be_capitalized_on_save(self):
meal = Meal.objects.get(name__iexact="breakfast")
self.assertEqual(meal.name, 'Breakfast')
def test_duplicate_meal_name_cannot_be_saved(self):
meal = Meal(name='Breakfast')
self.assertRaises(IntegrityError, meal.save)
| Use time object for model test | Use time object for model test
| Python | mit | teamtaverna/core | ---
+++
@@ -1,3 +1,5 @@
+from datetime import datetime
+
from django.test import TestCase
from django.db import IntegrityError
@@ -26,7 +28,9 @@
def setUp(self):
Meal.objects.create(
- name='breakfast', start_time='21:30:05', end_time='22:30:05'
+ name='breakfast',
+ start_time=datetime.strptime('21:30:05', '%H:%M:%S').time(),
+ end_time=datetime.strptime('22:30:05', '%H:%M:%S').time()
)
def test_meal_name_should_be_capitalized_on_save(self): |
3be244ab1d7b03648350356dd3d9b6025516def5 | capstone/rl/tabularf.py | capstone/rl/tabularf.py | import random
class TabularF(dict):
'''
Tabular representation for any of the two types of value functions:
1. state value function (V-Functions).
e.g.
vf = TabularF()
vf[state] = 1
2. state-action value functions (Q-functions)
e.g.
qf = TabularF()
qf[(state, action)] = 1
'''
def __getitem__(self, key):
if key not in self:
self[key] = random.random() - 0.5
return super(TabularF, self).__getitem__(key)
| import random
_MEAN = 0.0
_STD = 0.3
class TabularF(dict):
'''
Tabular representation for any of the two types of value functions:
1. state value function (V-Functions).
e.g.
vf = TabularF()
vf[state] = 1
2. state-action value functions (Q-functions)
e.g.
qf = TabularF()
qf[(state, action)] = 1
'''
def __getitem__(self, key):
if key not in self:
self[key] = random.gauss(_MEAN, _STD)
return super(TabularF, self).__getitem__(key)
| Initialize values using a gaussian distribution with mean = 0 and std = 0.3 | Initialize values using a gaussian distribution with mean = 0 and std = 0.3
| Python | mit | davidrobles/mlnd-capstone-code | ---
+++
@@ -1,5 +1,7 @@
import random
+_MEAN = 0.0
+_STD = 0.3
class TabularF(dict):
'''
@@ -19,5 +21,5 @@
def __getitem__(self, key):
if key not in self:
- self[key] = random.random() - 0.5
+ self[key] = random.gauss(_MEAN, _STD)
return super(TabularF, self).__getitem__(key) |
a6ac83978cfab99fbe1e2fc9715a0cdea3d7b472 | coss/urls.py | coss/urls.py | from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailcore import urls as wagtail_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from coss.search import views as search_views
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^search/$', search_views.search, name='search'),
# For anything not caught by a more specific rule above, hand over to
# Wagtail's page serving mechanism. This should be the last pattern in
# the list:
url(r'', include(wagtail_urls)),
# Alternatively, if you want Wagtail pages to be served from a subpath
# of your site, rather than the site root:
# url(r'^pages/', include(wagtail_urls)),
]
if settings.DEBUG:
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailcore import urls as wagtail_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from coss.search import views as search_views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^cms-admin/', include(wagtailadmin_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^search/$', search_views.search, name='search'),
# For anything not caught by a more specific rule above, hand over to
# Wagtail's page serving mechanism. This should be the last pattern in
# the list:
url(r'', include(wagtail_urls)),
# Alternatively, if you want Wagtail pages to be served from a subpath
# of your site, rather than the site root:
# url(r'^pages/', include(wagtail_urls)),
]
if settings.DEBUG:
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Move CMS admin panel to /cms. | Move CMS admin panel to /cms.
| Python | mpl-2.0 | akatsoulas/coss,akatsoulas/coss,akatsoulas/coss,akatsoulas/coss | ---
+++
@@ -11,9 +11,9 @@
from coss.search import views as search_views
urlpatterns = [
- url(r'^django-admin/', include(admin.site.urls)),
+ url(r'^admin/', include(admin.site.urls)),
- url(r'^admin/', include(wagtailadmin_urls)),
+ url(r'^cms-admin/', include(wagtailadmin_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^search/$', search_views.search, name='search'), |
11cf68b017f1feccc27fcfacea2c67aafc8e682d | tools/contributer_list.py | tools/contributer_list.py | #!/usr/bin/env python
"""Print a list of contributors for a particular milestone.
Usage:
python tools/contributor_list.py MILESTONE
"""
import sys
from gh_api import (
get_milestone_id,
get_issues_list,
)
if __name__ == "__main__":
if len(sys.argv) != 2:
print(__doc__)
sys.exit(1)
milestone = sys.argv[1]
milestone_id = get_milestone_id(
"jupyter/nbgrader",
milestone,
auth=True)
# this returns both issues and PRs
issues = get_issues_list(
"jupyter/nbgrader",
state='closed',
milestone=milestone_id,
auth=True)
users = set()
for issue in issues:
users.add(issue['user']['login'])
users = {user.lower(): user for user in users}
print()
print("The following users have submitted issues and/or PRs:")
print("-----------------------------------------------------")
for user in sorted(users.keys()):
print("- {}".format(users[user]))
print("-----------------------------------------------------")
| #!/usr/bin/env python
"""Print a list of contributors for a particular milestone.
Usage:
python tools/contributor_list.py [MILESTONE] [MILESTONE] ...
"""
import sys
from gh_api import (
get_milestones,
get_milestone_id,
get_issues_list,
)
if __name__ == "__main__":
if len(sys.argv) < 2:
milestones = get_milestones("jupyter/nbgrader", auth=True)
else:
milestones = sys.argv[1:]
users = set()
for milestone in milestones:
if milestone['title'] == "No action":
continue
print("Getting users for {}...".format(milestone['title']))
# this returns both issues and PRs
issues = get_issues_list(
"jupyter/nbgrader",
state='all',
milestone=milestone['number'],
auth=True)
for issue in issues:
users.add(issue['user']['login'])
users = {user.lower(): user for user in users}
print()
print("The following users have submitted issues and/or PRs:")
print("-----------------------------------------------------")
for user in sorted(users.keys()):
print("{}".format(users[user]))
print("-----------------------------------------------------")
| Include multiple milestones and open issues in contributor list | Include multiple milestones and open issues in contributor list [ci skip]
| Python | bsd-3-clause | jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jupyter/nbgrader | ---
+++
@@ -3,44 +3,45 @@
Usage:
- python tools/contributor_list.py MILESTONE
+ python tools/contributor_list.py [MILESTONE] [MILESTONE] ...
"""
import sys
from gh_api import (
+ get_milestones,
get_milestone_id,
get_issues_list,
)
if __name__ == "__main__":
- if len(sys.argv) != 2:
- print(__doc__)
- sys.exit(1)
-
- milestone = sys.argv[1]
-
- milestone_id = get_milestone_id(
- "jupyter/nbgrader",
- milestone,
- auth=True)
-
- # this returns both issues and PRs
- issues = get_issues_list(
- "jupyter/nbgrader",
- state='closed',
- milestone=milestone_id,
- auth=True)
+ if len(sys.argv) < 2:
+ milestones = get_milestones("jupyter/nbgrader", auth=True)
+ else:
+ milestones = sys.argv[1:]
users = set()
- for issue in issues:
- users.add(issue['user']['login'])
+ for milestone in milestones:
+ if milestone['title'] == "No action":
+ continue
+
+ print("Getting users for {}...".format(milestone['title']))
+
+ # this returns both issues and PRs
+ issues = get_issues_list(
+ "jupyter/nbgrader",
+ state='all',
+ milestone=milestone['number'],
+ auth=True)
+
+ for issue in issues:
+ users.add(issue['user']['login'])
users = {user.lower(): user for user in users}
print()
print("The following users have submitted issues and/or PRs:")
print("-----------------------------------------------------")
for user in sorted(users.keys()):
- print("- {}".format(users[user]))
+ print("{}".format(users[user]))
print("-----------------------------------------------------") |
3b22d14a3f20159d2f334002738d4e7fcfa7b09c | bitHopper/Tracking/getwork_store.py | bitHopper/Tracking/getwork_store.py | #Copyright (C) 2011,2012 Colin Rice
#This software is licensed under an included MIT license.
#See the file entitled LICENSE
#If you were not provided with a copy of the license please contact:
# Colin Rice colin@daedrum.net
import gevent, time, logging
class Getwork_Store:
"""
Class that stores getworks so we can figure out the server again
"""
def __init__(self):
self.data = {}
gevent.spawn(self.prune)
def add(self, merkle_root, data):
"""
Adds a merkle_root and a data value
"""
self.data[merkle_root] = (data, time.time())
def get(self, merkle_root):
""
if self.data.has_key(merkle_root):
return self.data[merkle_root][0]
logging.debug('Merkle Root Not Found %s', merkle_root)
return None
def drop_roots(self):
"""
Resets the merkle_root database
Very crude.
Should probably have an invalidate block function instead
"""
self.data = {}
def prune(self):
"""
Running greenlet that prunes old merkle_roots
"""
while True:
for key, work in self.data.items():
if work[1] < (time.time() - (60*3)):
del self.data[key]
gevent.sleep(60)
| #Copyright (C) 2011,2012 Colin Rice
#This software is licensed under an included MIT license.
#See the file entitled LICENSE
#If you were not provided with a copy of the license please contact:
# Colin Rice colin@daedrum.net
import gevent, time, logging
class Getwork_Store:
    """
    Class that stores getworks so we can figure out the server again.

    Maps a work unit's merkle root to the server data it came from, so a
    submitted share can be routed back to the originating pool.
    """

    def __init__(self):
        self.data = {}
        # Background greenlet that expires stale entries.
        gevent.spawn(self.prune)

    def add(self, merkle_root, data):
        """
        Store *data* under *merkle_root*, timestamped for later pruning.
        """
        self.data[merkle_root] = (data, time.time())

    def get(self, merkle_root):
        """
        Return the data stored for *merkle_root*, or None if unknown.
        """
        # dict.has_key() was removed in Python 3; 'in' works everywhere.
        if merkle_root in self.data:
            return self.data[merkle_root][0]
        logging.debug('Merkle Root Not Found %s', merkle_root)
        return None

    def drop_roots(self):
        """
        Resets the merkle_root database
        Very crude.
        Should probably have an invalidate block function instead
        """
        self.data = {}

    def prune(self):
        """
        Running greenlet that prunes merkle_roots older than 20 minutes.
        """
        while True:
            # Snapshot the items so deleting entries while iterating is safe
            # (mutating a dict during items() iteration raises in Python 3).
            for key, work in list(self.data.items()):
                if work[1] < (time.time() - (60*20)):
                    del self.data[key]
            gevent.sleep(60)
| Store merkle roots for 10 minutes | Store merkle roots for 10 minutes
| Python | mit | c00w/bitHopper,c00w/bitHopper | ---
+++
@@ -1,7 +1,7 @@
#Copyright (C) 2011,2012 Colin Rice
#This software is licensed under an included MIT license.
#See the file entitled LICENSE
-#If you were not provided with a copy of the license please contact:
+#If you were not provided with a copy of the license please contact:
# Colin Rice colin@daedrum.net
import gevent, time, logging
@@ -10,7 +10,7 @@
"""
Class that stores getworks so we can figure out the server again
"""
-
+
def __init__(self):
self.data = {}
gevent.spawn(self.prune)
@@ -27,21 +27,21 @@
return self.data[merkle_root][0]
logging.debug('Merkle Root Not Found %s', merkle_root)
return None
-
+
def drop_roots(self):
"""
Resets the merkle_root database
Very crude.
Should probably have an invalidate block function instead
"""
- self.data = {}
-
+ self.data = {}
+
def prune(self):
"""
Running greenlet that prunes old merkle_roots
"""
while True:
for key, work in self.data.items():
- if work[1] < (time.time() - (60*3)):
+ if work[1] < (time.time() - (60*20)):
del self.data[key]
gevent.sleep(60) |
94b37ba0abacbff53da342574b61c87810f6a5d4 | bulletin/tools/plugins/forms/job.py | bulletin/tools/plugins/forms/job.py | from datetimewidget.widgets import DateTimeWidget
from django.forms import ModelForm
from form_utils.widgets import ImageWidget
from ..models import Job
job_field_labels = {
'image': 'Image (10Mb Limit)',
'url': 'URL'
}
job_help_texts = {
'url': 'Provide a full url, e.g., "http://www.example.com/page.html"'
}
class JobSubmitForm(ModelForm):
class Meta:
model = Job
fields = ['title',
'url',
'organization',
'image']
labels = {
}
labels = job_field_labels
help_texts = job_help_texts
class JobUpdateForm(ModelForm):
class Meta:
model = Job
fields = ['title',
'url',
'organization',
'image',
'approved',
'include_in_newsletter',
'pub_date']
widgets = {
'pub_date': DateTimeWidget(usel10n=True, bootstrap_version=3),
'image': ImageWidget(),
}
labels = job_field_labels
help_texts = job_help_texts
| from datetimewidget.widgets import DateTimeWidget
from django.forms import ModelForm
from form_utils.widgets import ImageWidget
from ..models import Job
job_field_labels = {
'image': 'Image (10Mb Limit)',
'url': 'URL'
}
job_help_texts = {
'url': 'Provide a full url, e.g., "http://www.example.com/page.html"'
}
field_widgets = {
'image': ImageWidget(attrs={'required': 'required'})
}
class JobSubmitForm(ModelForm):
    """Submission form for Job postings: title, URL, organization, image."""
    class Meta:
        model = Job
        fields = ['title',
                  'url',
                  'organization',
                  'image']
        # Shared label/help-text/widget definitions (module-level dicts);
        # field_widgets marks the image input as required.
        labels = job_field_labels
        help_texts = job_help_texts
        widgets = field_widgets
class JobUpdateForm(ModelForm):
    """Editing form for Job postings; adds approval/publication fields."""
    class Meta:
        model = Job
        fields = ['title',
                  'url',
                  'organization',
                  'image',
                  'approved',
                  'include_in_newsletter',
                  'pub_date']
        widgets = {
            # Date/time picker widget configured for Bootstrap 3.
            'pub_date': DateTimeWidget(usel10n=True, bootstrap_version=3),
            'image': ImageWidget(),
        }
        # Shared label/help-text definitions (module-level dicts).
        labels = job_field_labels
        help_texts = job_help_texts
| Make image required on Job submit form. | Make image required on Job submit form.
| Python | mit | AASHE/django-bulletin,AASHE/django-bulletin,AASHE/django-bulletin | ---
+++
@@ -13,6 +13,10 @@
'url': 'Provide a full url, e.g., "http://www.example.com/page.html"'
}
+field_widgets = {
+ 'image': ImageWidget(attrs={'required': 'required'})
+}
+
class JobSubmitForm(ModelForm):
@@ -22,11 +26,9 @@
'url',
'organization',
'image']
- labels = {
-
- }
labels = job_field_labels
help_texts = job_help_texts
+ widgets = field_widgets
class JobUpdateForm(ModelForm): |
7a4a3d76f89c14426191a6f0c4dfe09371416a0e | lib/pegasus/python/Pegasus/__init__.py | lib/pegasus/python/Pegasus/__init__.py | # Copyright 2009 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| # Copyright 2009 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
class Logger(logging.getLoggerClass()):
    """A custom logger for Pegasus that adds a TRACE level below DEBUG."""

    # Mirror the standard level constants so callers can use Logger.INFO etc.
    CRITICAL = logging.CRITICAL
    ERROR = logging.ERROR
    WARNING = logging.WARNING
    INFO = logging.INFO
    DEBUG = logging.DEBUG
    TRACE = logging.DEBUG - 1
    NOTSET = logging.NOTSET

    def __init__(self, name, level=0):
        # Delegate to the actual base class: the base is whatever
        # logging.getLoggerClass() returned, which is not necessarily
        # logging.Logger. Hard-coding logging.Logger.__init__ here would
        # skip a third-party logger class's initialization.
        super(Logger, self).__init__(name, level)

    def trace(self, message, *args, **kwargs):
        "Log a TRACE level message"
        self.log(Logger.TRACE, message, *args, **kwargs)

# Add a TRACE level to logging
logging.addLevelName(Logger.TRACE, "TRACE")

# Use our own logger class, which has trace
logging.setLoggerClass(Logger)
| Add TRACE level to Pegasus logging | Add TRACE level to Pegasus logging
| Python | apache-2.0 | pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus | ---
+++
@@ -11,3 +11,28 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import logging
+
+class Logger(logging.getLoggerClass()):
+ "A custom logger for Pegasus with TRACE level"
+ CRITICAL = logging.CRITICAL
+ ERROR = logging.ERROR
+ WARNING = logging.WARNING
+ INFO = logging.INFO
+ DEBUG = logging.DEBUG
+ TRACE = logging.DEBUG - 1
+ NOTSET = logging.NOTSET
+
+ def __init__(self, name, level=0):
+ logging.Logger.__init__(self, name, level)
+
+ def trace(self, message, *args, **kwargs):
+ "Log a TRACE level message"
+ self.log(Logger.TRACE, message, *args, **kwargs)
+
+# Add a TRACE level to logging
+logging.addLevelName(Logger.TRACE, "TRACE")
+
+# Use our own logger class, which has trace
+logging.setLoggerClass(Logger)
+ |
9a4ea40b2eb164f8c18e9812c27aa430c3c27772 | api/serializers.py | api/serializers.py | from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'show_beta_interface',
'user',
'created_date',
'modified_date'
)
| from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
    """Full serializer for a UserPreferences row, including its owner."""

    # Read-only: the owning user is rendered inline via UserRelatedField
    # (which delegates to UserSerializer) but cannot be reassigned here.
    user = UserRelatedField(read_only=True)

    class Meta:
        model = UserPreferences
        fields = (
            'id',
            'url',
            'user',
            'show_beta_interface',
            'airport_ui',
            'created_date',
            'modified_date'
        )
| Include new airport_ui preference in serializer for view | Include new airport_ui preference in serializer for view
| Python | apache-2.0 | CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend | ---
+++
@@ -52,8 +52,9 @@
fields = (
'id',
'url',
+ 'user',
'show_beta_interface',
- 'user',
+ 'airport_ui',
'created_date',
'modified_date'
) |
1dac2942eb9a15517392eebc9aa96dcb658ebfee | src/passgen.py | src/passgen.py | import string
import random
import argparse
def passgen(length=12):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser(
description="Generate strong random password."
)
parser.add_argument("-l", "--length",
help="the number of characters to generate "
"for each password",
type=int, default=12)
parser.add_argument("-n", "--number",
help="how many passwords to generate",
type=int, default=10)
args = parser.parse_args()
for _ in range(args.number):
print passgen(args.length)
| import string
import random
import argparse
def passgen(length=12):
    """Generate a strong random password.

    Args:
        length: number of characters to produce (default 12).

    Returns:
        A string of *length* ASCII letters and digits drawn from a
        cryptographically secure source (os.urandom-backed).
    """
    pool = string.ascii_uppercase + string.ascii_lowercase + string.digits

    # Using technique from Stack Overflow answer
    # http://stackoverflow.com/a/23728630
    # Create the secure generator once instead of once per character.
    rng = random.SystemRandom()
    chars = [rng.choice(pool) for _ in range(length)]
    return "".join(chars)
def main():
    """Parse command-line options and print the requested passwords."""
    parser = argparse.ArgumentParser(
        description="Generate strong random password."
    )
    parser.add_argument("-l", "--length",
                        help="the number of characters to generate "
                             "for each password",
                        type=int, default=12)
    parser.add_argument("-n", "--number",
                        help="how many passwords to generate",
                        type=int, default=10)
    args = parser.parse_args()

    for _ in range(args.number):
        # Parenthesized so this is valid on both Python 2 and Python 3
        # (the bare 'print x' statement is a SyntaxError on Python 3).
        print(passgen(args.length))
| Make it more clear how password is generated | Make it more clear how password is generated
| Python | mit | soslan/passgen | ---
+++
@@ -6,7 +6,11 @@
def passgen(length=12):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
- return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
+
+ # Using technique from Stack Overflow answer
+ # http://stackoverflow.com/a/23728630
+ chars = [random.SystemRandom().choice(pool) for _ in range(length)]
+ return "".join(chars)
def main():
@@ -20,7 +24,7 @@
parser.add_argument("-n", "--number",
help="how many passwords to generate",
type=int, default=10)
+ args = parser.parse_args()
- args = parser.parse_args()
for _ in range(args.number):
print passgen(args.length) |
0c22486320b064c078fe009faf41e2d0c7f5e272 | passwordless/views.py | passwordless/views.py | from django.shortcuts import render
from django.views.generic.edit import FormView
from . import forms
# Create your views here.
def logout(request):
return render(request, 'passwordless/logout.html')
def authn(request, token):
return render(request, 'passwordless/authn.html')
class LoginView(FormView):
template_name = 'passwordless/login.html'
form_class = forms.LoginForm
success_url = '/'
def form_valid(self, form):
form.send_email()
return super().form_valid(form)
class RegisterView(FormView):
template_name = 'passwordless/register.html'
form_class = forms.RegistrationForm
success_url = '/'
def form_valid(self, form):
form.create_user()
form.send_email()
return super().form_valid(form)
| from django.shortcuts import render
from django.views.generic.edit import FormView
from . import forms
# Create your views here.
def logout(request):
return render(request, 'passwordless/logout.html')
def authn(request, token):
return render(request, 'passwordless/authn.html')
class LoginView(FormView):
template_name = 'passwordless/login.html'
form_class = forms.LoginForm
success_url = '/'
def form_valid(self, form):
form.send_email()
return super().form_valid(form)
class RegisterView(LoginView):
    """Registration view; reuses LoginView's success URL and email flow."""
    template_name = 'passwordless/register.html'
    form_class = forms.RegistrationForm

    def form_valid(self, form):
        # Create the account first, then let LoginView.form_valid send the
        # login email via super().
        form.create_user()
        return super().form_valid(form)
| Refactor RegisterView as subclass of LoginView | Refactor RegisterView as subclass of LoginView
They share much of the work, they should share the code as well
| Python | mit | Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters | ---
+++
@@ -23,14 +23,12 @@
return super().form_valid(form)
-class RegisterView(FormView):
+class RegisterView(LoginView):
template_name = 'passwordless/register.html'
form_class = forms.RegistrationForm
- success_url = '/'
def form_valid(self, form):
form.create_user()
- form.send_email()
return super().form_valid(form)
|
1d5b3630d372d763ad445b969eaad97fd569db52 | examples/cross_thread.py | examples/cross_thread.py | #!/usr/bin/python
"""
Example of an Eliot action context spanning multiple threads.
"""
from threading import Thread
from sys import stdout
from eliot import to_file, preserve_context, start_action
to_file(stdout)
def add_in_thread(x, y):
with start_action(action_type="in_thread", x=x, y=y) as context:
context.add_success_fields(result=x+y)
with start_action(action_type="main_thread"):
# Preserve Eliot context and restore in new thread:
thread = Thread(target=preserve_context(add_in_thread),
kwargs={"x": 3, "y": 4})
thread.start()
# Wait for the thread to exit:
thread.join()
| #!/usr/bin/env python
"""
Example of an Eliot action context spanning multiple threads.
"""
from __future__ import unicode_literals
from threading import Thread
from sys import stdout
from eliot import to_file, preserve_context, start_action
to_file(stdout)
def add_in_thread(x, y):
    """Add *x* and *y* inside an Eliot action, recording the sum."""
    with start_action(action_type="in_thread", x=x, y=y) as context:
        # Attach the result to the action's success message.
        context.add_success_fields(result=x+y)
with start_action(action_type="main_thread"):
# Preserve Eliot context and restore in new thread:
thread = Thread(target=preserve_context(add_in_thread),
kwargs={"x": 3, "y": 4})
thread.start()
# Wait for the thread to exit:
thread.join()
| Fix hashbang, add future import. | Fix hashbang, add future import.
| Python | apache-2.0 | ScatterHQ/eliot,ScatterHQ/eliot,ScatterHQ/eliot,ClusterHQ/eliot | ---
+++
@@ -1,8 +1,10 @@
-#!/usr/bin/python
+#!/usr/bin/env python
"""
Example of an Eliot action context spanning multiple threads.
"""
+
+from __future__ import unicode_literals
from threading import Thread
from sys import stdout |
0f844e44f0b1b8873aa777becf7e7a8fcc48483b | virtool/indexes/models.py | virtool/indexes/models.py | import enum
from sqlalchemy import Column, Integer, String, Enum
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String)
index = Column(String)
type = Column(Enum(IndexType))
size = Column(Integer)
def __repr__(self):
return f"<IndexFile(id={self.id}, name={self.name}, index={self.index}, type={self.type}, " \
f"size={self.size} "
| import enum
from sqlalchemy import Column, Integer, String, Enum
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
    """
    Enumerated type for index file types
    """
    # The str mixin makes members compare and serialize as plain strings.
    json = "json"
    fasta = "fasta"
    bowtie2 = "bowtie2"
class IndexFile(Base):
    """
    SQL model to store new index files
    """
    __tablename__ = "index_files"

    id = Column(Integer, primary_key=True)
    # File name and owning index id; both required at the database level.
    name = Column(String, nullable=False)
    index = Column(String, nullable=False)
    type = Column(Enum(IndexType))
    size = Column(Integer)

    def __repr__(self):
        # BUG FIX: the repr string was unterminated ("size=... " with no
        # closing ")>"), yielding a malformed representation.
        return f"<IndexFile(id={self.id}, name={self.name}, index={self.index}, type={self.type}, " \
               f"size={self.size})>"
| Make 'name' and 'index' columns non-nullable for IndexFile model | Make 'name' and 'index' columns non-nullable for IndexFile model
| Python | mit | virtool/virtool,virtool/virtool,igboyes/virtool,igboyes/virtool | ---
+++
@@ -23,8 +23,8 @@
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
- name = Column(String)
- index = Column(String)
+ name = Column(String, nullable=False)
+ index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
|
e01571fb8c29b78f16c34bfcd2d806b183224047 | opps/containers/forms.py | opps/containers/forms.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class ContainerAdminForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(ContainerAdminForm, self).__init__(*args, **kwargs)
self.fields['json'] = JSONFormField(
widget=JSONField(
attrs={'_model': self._meta.model.__name__}),
required=False)
for field in Field.objects.filter(application__contains=
self._meta.model.__name__):
if field.type == 'checkbox':
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
else:
self.fields[
'json_{}'.format(field.slug)
] = forms.CharField(required=False)
if settings.OPPS_MIRROR_CHANNEL:
self.field['mirror_channel'] = forms.CharField(
widget=forms.HiddenInput(), required=False)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class ContainerAdminForm(forms.ModelForm):
    """Admin form that builds its dynamic custom fields at runtime."""
    def __init__(self, *args, **kwargs):
        """Add the JSON blob field plus one form field per custom Field."""
        super(ContainerAdminForm, self).__init__(*args, **kwargs)

        # Hide the mirror-channel selector when the feature is disabled.
        if not settings.OPPS_MIRROR_CHANNEL:
            self.fields['mirror_channel'].widget = forms.HiddenInput()

        # Raw JSON storage for the dynamic field values; the widget is told
        # which model it belongs to via the '_model' attribute.
        self.fields['json'] = JSONFormField(
            widget=JSONField(
                attrs={'_model': self._meta.model.__name__}),
            required=False)

        # One CharField per configured custom Field; checkbox fields get one
        # sub-field per option, named "json_<field>_<option>".
        for field in Field.objects.filter(application__contains=
                                          self._meta.model.__name__):
            if field.type == 'checkbox':
                for fo in FieldOption.objects.filter(field=field):
                    self.fields[
                        'json_{}_{}'.format(
                            field.slug, fo.option.slug
                        )] = forms.CharField(required=False)
            else:
                self.fields[
                    'json_{}'.format(field.slug)
                ] = forms.CharField(required=False)
| Fix mirror_channel widget on OPPS_MIRROR_CHANNEL false | Fix mirror_channel widget on OPPS_MIRROR_CHANNEL false
| Python | mit | opps/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,williamroot/opps,opps/opps,jeanmask/opps,opps/opps,jeanmask/opps,williamroot/opps | ---
+++
@@ -11,6 +11,9 @@
class ContainerAdminForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(ContainerAdminForm, self).__init__(*args, **kwargs)
+
+ if not settings.OPPS_MIRROR_CHANNEL:
+ self.fields['mirror_channel'].widget = forms.HiddenInput()
self.fields['json'] = JSONFormField(
widget=JSONField(
@@ -28,7 +31,3 @@
self.fields[
'json_{}'.format(field.slug)
] = forms.CharField(required=False)
-
- if settings.OPPS_MIRROR_CHANNEL:
- self.field['mirror_channel'] = forms.CharField(
- widget=forms.HiddenInput(), required=False) |
9ada81a111c0b5c8d523bbb532a7be3873f603fe | __init__.py | __init__.py | from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
""" Respond to incoming calls with a SMS """
body = request.values.get('Body', None)
resp = twilio.twiml.Response()
if body == '#bff':
resp.message('#awesome')
resp.message("What's your name?")
else:
resp.message('#lame')
return str(resp)
if __name__ == "__main__":
app.run(debug=True)
| from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello():
    """ Respond to incoming calls with a SMS """
    # The inbound message text from Twilio's webhook parameters.
    body = request.values.get('Body', None)

    resp = twilio.twiml.Response()

    if body == '#bff':
        resp.message('#awesome')
        resp.message("What's your name?")
        # NOTE(review): this re-reads the same request's Body, so user_name
        # is always identical to body ('#bff' in this branch). A real
        # back-and-forth needs per-sender state across webhook requests.
        user_name = request.values.get('Body', None)
        resp.message("It's great to meet you {name}!".format(name=user_name))
        resp.message("Wanna set up a play date?")
        # NOTE(review): same issue -- play_date_response is still '#bff'
        # here, so the 'yes'/'no' branches below are unreachable and the
        # else branch always fires within a single request.
        play_date_response = request.values.get('Body', None)
        if play_date_response.lower() == 'yes':
            resp.message("That's great! "
                         "Get your parent's permission, "
                         "and go to http://chatsters.com.")
        elif play_date_response.lower() == 'no':
            resp.message("Too bad. Maybe another time!")
        else:
            resp.message("#lame")
    else:
        resp.message('#lame')
    return str(resp)
if __name__ == "__main__":
app.run(debug=True)
| Add more conversation into the mix. | Add more conversation into the mix.
| Python | mit | patrickbeeson/text-me | ---
+++
@@ -15,6 +15,18 @@
if body == '#bff':
resp.message('#awesome')
resp.message("What's your name?")
+ user_name = request.values.get('Body', None)
+ resp.message("It's great to meet you {name}!".format(name=user_name))
+ resp.message("Wanna set up a play date?")
+ play_date_response = request.values.get('Body', None)
+ if play_date_response.lower() == 'yes':
+ resp.message("That's great! "
+ "Get your parent's permission, "
+ "and go to http://chatsters.com.")
+ elif play_date_response.lower() == 'no':
+ resp.message("Too bad. Maybe another time!")
+ else:
+ resp.message("#lame")
else:
resp.message('#lame')
|
d672cbc84ced1af4f7e5f0cf97c5d087a477717c | tools/sharding_supervisor/sharding_supervisor.py | tools/sharding_supervisor/sharding_supervisor.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from sharding_supervisor_old import * # pylint: disable=W0401,W0614
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Defer to run_test_cases.py."""
import os
import optparse
import sys
ROOT_DIR = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def pop_gtest_output(args):
  """Remove and return the first '--gtest_output=...' flag in *args*.

  *args* is mutated in place; returns None when no such flag is present.
  """
  position = 0
  while position < len(args):
    if args[position].startswith('--gtest_output='):
      return args.pop(position)
    position += 1
  return None
def main():
  """Translate legacy sharding_supervisor flags and defer to run_test_cases."""
  parser = optparse.OptionParser()
  group = optparse.OptionGroup(
      parser, 'Compability flag with the old sharding_supervisor')
  group.add_option(
      '--no-color', action='store_true', help='Ignored')
  group.add_option(
      '--retry-failed', action='store_true', help='Ignored')
  group.add_option(
      '-t', '--timeout', type='int', help='Kept as --timeout')
  # BUG FIX: the two help strings below were swapped; the cmd list maps
  # --total-slaves to --shards and --slave-index to --index.
  group.add_option(
      '--total-slaves', type='int', default=1, help='Converted to --shards')
  group.add_option(
      '--slave-index', type='int', default=0, help='Converted to --index')
  parser.add_option_group(group)
  parser.disable_interspersed_args()
  options, args = parser.parse_args()

  # Make run_test_cases importable from the swarm_client checkout.
  swarm_client_dir = os.path.join(ROOT_DIR, 'tools', 'swarm_client')
  sys.path.insert(0, swarm_client_dir)

  cmd = [
    '--shards', str(options.total_slaves),
    '--index', str(options.slave_index),
    '--no-dump',
    '--no-cr',
  ]
  if options.timeout is not None:
    cmd.extend(['--timeout', str(options.timeout)])
  gtest_output = pop_gtest_output(args)
  if gtest_output:
    # It is important that --gtest_output appears before the '--' so it is
    # properly processed by run_test_cases.
    cmd.append(gtest_output)
  import run_test_cases  # pylint: disable=F0401
  return run_test_cases.main(cmd + ['--'] + args)
if __name__ == '__main__':
sys.exit(main())
| Switch over to run_test_cases.py, take 2. | Switch over to run_test_cases.py, take 2.
Instead of doing a "Revert r168479 'Revert r168478'", it's using a simpler version based on renaming sharding_supervisor first.
Try to enable run_test_cases.py again.
R=phajdan.jr@chromium.org
BUG=164886
Review URL: https://codereview.chromium.org/11472024
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@180696 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,anirudhSK/chromium,anirudhSK/chromium,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,zcbenz/cefode-chromium,dushu1203/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,M4sse/chromium.src,mogoweb/chromium-crosswalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,timopulkkinen/BubbleFish,M4sse/chromium.src,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,ondra-novak/chromium.src,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,littlstar/chromium.src,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,Chilledheart/chromium,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,littlstar/chromium.src,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,anirudhSK/chromium,Fireblend/chromium-crosswalk,Jonekee/c
hromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,bright-sparks/chromium-spacewalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,timopulkkinen/BubbleFish,Jonekee/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,zcbenz/cefode-chromium,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,littlstar/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,littlstar/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,dednal/chromium.src,ChromiumWebApps/chromium,Chilledheart/chromium,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,hgl888/
chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,zcbenz/cefode-chromium,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,littlstar/chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,M4sse/chromium.src,littlstar/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,zcbenz/cefode-chromium,chuan9/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,patrickm/chromium.src,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,Just-D/chromium-1,ondra-novak/chromium.src,patrickm/chromium.src,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,Chilledheart/chromium,axinging/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,M4sse/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,Fir
eblend/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,ltilve/chromium,markYoungH/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,littlstar/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,patrickm/chromium.src,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,ondra-novak/chromium.src,ChromiumWebApps/chromium,M4sse/chromium.src,jaruba/chromium.src,zcbenz/cefode-chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk,ltilve/chromium,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,anirudhSK/chromium,anirudhSK/chromium,mogoweb/chromium-crosswalk,timopulkkinen/BubbleFish,patrickm/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,dednal/chromium.src,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,timopulkkinen/BubbleFish,Pluto-tv/chromium-crosswalk,dednal/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,markYoungH/chromium.src,zcbenz/cefode-chromium,ltilve/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,timopulkkinen/BubbleFish,anirudhSK/chromium,Jonekee/chromium.src,bright-sparks/c
hromium-spacewalk,Jonekee/chromium.src,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk | ---
+++
@@ -3,8 +3,64 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from sharding_supervisor_old import * # pylint: disable=W0401,W0614
+"""Defer to run_test_cases.py."""
+
+import os
+import optparse
+import sys
+
+ROOT_DIR = os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-if __name__ == "__main__":
+def pop_gtest_output(args):
+ """Extracts --gtest_output from the args if present."""
+ for index, arg in enumerate(args):
+ if arg.startswith('--gtest_output='):
+ return args.pop(index)
+
+
+def main():
+ parser = optparse.OptionParser()
+
+ group = optparse.OptionGroup(
+ parser, 'Compability flag with the old sharding_supervisor')
+ group.add_option(
+ '--no-color', action='store_true', help='Ignored')
+ group.add_option(
+ '--retry-failed', action='store_true', help='Ignored')
+ group.add_option(
+ '-t', '--timeout', type='int', help='Kept as --timeout')
+ group.add_option(
+ '--total-slaves', type='int', default=1, help='Converted to --index')
+ group.add_option(
+ '--slave-index', type='int', default=0, help='Converted to --shards')
+ parser.add_option_group(group)
+
+ parser.disable_interspersed_args()
+ options, args = parser.parse_args()
+
+ swarm_client_dir = os.path.join(ROOT_DIR, 'tools', 'swarm_client')
+ sys.path.insert(0, swarm_client_dir)
+
+ cmd = [
+ '--shards', str(options.total_slaves),
+ '--index', str(options.slave_index),
+ '--no-dump',
+ '--no-cr',
+ ]
+ if options.timeout is not None:
+ cmd.extend(['--timeout', str(options.timeout)])
+ gtest_output = pop_gtest_output(args)
+ if gtest_output:
+ # It is important that --gtest_output appears before the '--' so it is
+ # properly processed by run_test_cases.
+ cmd.append(gtest_output)
+
+ import run_test_cases # pylint: disable=F0401
+
+ return run_test_cases.main(cmd + ['--'] + args)
+
+
+if __name__ == '__main__':
sys.exit(main()) |
7e70f6deb9e66a43c37241bc4e86ec2f85d81b4a | api/radar_api/serializers/sources.py | api/radar_api/serializers/sources.py | from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.source_types import SourceTypeReferenceField
class SourceGroupSerializerMixin(object):
source_group = GroupReferenceField()
source_type = SourceTypeReferenceField()
def get_model_exclude(self):
attrs = super(SourceGroupSerializerMixin, self).get_model_exclude()
attrs.add('source_group_id')
return attrs
| from radar_api.serializers.groups import TinyGroupReferenceField
from radar_api.serializers.source_types import SourceTypeReferenceField
class SourceGroupSerializerMixin(object):
source_group = TinyGroupReferenceField()
source_type = SourceTypeReferenceField()
def get_model_exclude(self):
attrs = super(SourceGroupSerializerMixin, self).get_model_exclude()
attrs.add('source_group_id')
return attrs
| Use tiny serializer for source group | Use tiny serializer for source group
| Python | agpl-3.0 | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | ---
+++
@@ -1,9 +1,9 @@
-from radar_api.serializers.groups import GroupReferenceField
+from radar_api.serializers.groups import TinyGroupReferenceField
from radar_api.serializers.source_types import SourceTypeReferenceField
class SourceGroupSerializerMixin(object):
- source_group = GroupReferenceField()
+ source_group = TinyGroupReferenceField()
source_type = SourceTypeReferenceField()
def get_model_exclude(self): |
a42997458baa1c6a1648896ff50f44e79525f8a1 | ognskylines/commands/devices/insert.py | ognskylines/commands/devices/insert.py | from ognskylines.dbutils import session
from ognskylines.model import Device
from ogn.utils import get_ddb, get_trackable
from manager import Manager
manager = Manager()
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
devices = get_trackable(get_ddb())
for ogn_address in devices:
device = Device(ogn_address=ogn_address[3:])
session.add(device)
session.commit()
print("Imported {} devices.".format(len(devices)))
| from ognskylines.dbutils import session
from ognskylines.model import Device
import requests
from manager import Manager
manager = Manager()
DDB_URL = "http://ddb.glidernet.org/download/?j=1"
def get_ddb():
devices = requests.get(DDB_URL).json()
for device in devices['devices']:
device.update({'identified': device['identified'] == 'Y',
'tracked': device['tracked'] == 'Y'})
yield device
@manager.command
def import_ddb():
"""Import registered devices from the DDB (discards all devices before import)."""
session.query(Device).delete()
print("Import registered devices fom the DDB...")
for device in get_ddb():
if device['identified'] and device['tracked']:
session.add(Device(ogn_address=device['device_id']))
session.commit()
print("Imported {} devices.".format(session.query(Device).count()))
| Add function to fetch devices from the DDB | Add function to fetch devices from the DDB
| Python | agpl-3.0 | kerel-fs/ogn-skylines-gateway,kerel-fs/ogn-skylines-gateway | ---
+++
@@ -1,10 +1,20 @@
from ognskylines.dbutils import session
from ognskylines.model import Device
-from ogn.utils import get_ddb, get_trackable
-
+import requests
from manager import Manager
manager = Manager()
+
+
+DDB_URL = "http://ddb.glidernet.org/download/?j=1"
+
+
+def get_ddb():
+ devices = requests.get(DDB_URL).json()
+ for device in devices['devices']:
+ device.update({'identified': device['identified'] == 'Y',
+ 'tracked': device['tracked'] == 'Y'})
+ yield device
@manager.command
@@ -13,9 +23,9 @@
session.query(Device).delete()
print("Import registered devices fom the DDB...")
- devices = get_trackable(get_ddb())
- for ogn_address in devices:
- device = Device(ogn_address=ogn_address[3:])
- session.add(device)
+ for device in get_ddb():
+ if device['identified'] and device['tracked']:
+ session.add(Device(ogn_address=device['device_id']))
+
session.commit()
- print("Imported {} devices.".format(len(devices)))
+ print("Imported {} devices.".format(session.query(Device).count())) |
0ac3ab3278e81aebe4717e0f599f752b4fda06d3 | examples/swat-s1/tests.py | examples/swat-s1/tests.py | """
swat-s1 tests.
"""
# from mininet.cli import CLI
from mininet.net import Mininet
from nose.plugins.skip import SkipTest
from utils import STATE, RWT_INIT_LEVEL
from utils import TANK_SECTION
from topo import SwatTopo
from physical_process import RawWaterTank
# import subprocess
# import sys
@SkipTest
def test_init():
pass
def test_topo():
topo = SwatTopo()
net = Mininet(topo=topo)
net.start()
net.pingAll()
net.stop()
def test_raw_water_tank():
RawWaterTank(
name='test_rwt',
state=STATE,
protocol=None,
section=TANK_SECTION,
level=RWT_INIT_LEVEL
)
| """
swat-s1 tests.
"""
# from mininet.cli import CLI
from mininet.net import Mininet
from utils import STATE, RWT_INIT_LEVEL
from utils import TANK_SECTION
from topo import SwatTopo
from physical_process import RawWaterTank
# import subprocess
# import sys
def test_init():
pass
def test_topo():
topo = SwatTopo()
net = Mininet(topo=topo)
net.start()
net.pingAll()
net.stop()
def test_raw_water_tank():
RawWaterTank(
name='test_rwt',
state=STATE,
protocol=None,
section=TANK_SECTION,
level=RWT_INIT_LEVEL
)
| Remove examples dep from nose | Remove examples dep from nose
| Python | mit | remmihsorp/minicps,scy-phy/minicps,remmihsorp/minicps,scy-phy/minicps | ---
+++
@@ -4,8 +4,6 @@
# from mininet.cli import CLI
from mininet.net import Mininet
-
-from nose.plugins.skip import SkipTest
from utils import STATE, RWT_INIT_LEVEL
from utils import TANK_SECTION
@@ -17,7 +15,6 @@
# import sys
-@SkipTest
def test_init():
pass |
d5ed26ebbd84ed16d8d39607ef138581aa3b9d75 | osf/migrations/0145_add_preprint_contenttype_to_collections.py | osf/migrations/0145_add_preprint_contenttype_to_collections.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-11-08 16:56
from __future__ import unicode_literals
from django.db import migrations
from osf.models import Collection
from django.contrib.contenttypes.models import ContentType
def reverse_func(state, schema):
preprint_content_type = ContentType.objects.get(app_label='osf', model='preprint')
collections = Collection.objects.filter(collected_types__in=[preprint_content_type.id])
for collection in collections:
collection.collected_types.remove(preprint_content_type)
def add_preprint_type_to_collections(state, schema):
preprint_content_type = ContentType.objects.get(app_label='osf', model='preprint')
collections = Collection.objects.exclude(collected_types__in=[preprint_content_type.id])
for collection in collections:
collection.collected_types.add(preprint_content_type)
class Migration(migrations.Migration):
dependencies = [
('osf', '0144_merge_20181113_1420'),
]
operations = [
migrations.RunPython(add_preprint_type_to_collections, reverse_func)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-11-08 16:56
from __future__ import unicode_literals
from django.db import migrations
from osf.models import Collection
from django.contrib.contenttypes.models import ContentType
def reverse_func(state, schema):
preprint_content_type = ContentType.objects.get(app_label='osf', model='preprint')
ThroughModel = Collection.collected_types.through
ThroughModel.objects.filter(contenttype_id=preprint_content_type.id).delete()
def add_preprint_type_to_collections(state, schema):
ThroughModel = Collection.collected_types.through
preprint_ct_id = ContentType.objects.get(app_label='osf', model='preprint').id
through_objects = []
collections = Collection.objects.exclude(collected_types__in=[preprint_ct_id])
for collection in collections:
through_objects.append(ThroughModel(collection_id=collection.id, contenttype_id=preprint_ct_id))
ThroughModel.objects.bulk_create(through_objects)
class Migration(migrations.Migration):
dependencies = [
('osf', '0144_merge_20181113_1420'),
]
operations = [
migrations.RunPython(add_preprint_type_to_collections, reverse_func)
]
| Use the Collection.collected_types through table to bulk add preprints as a valid collected_type for existing collections. | Use the Collection.collected_types through table to bulk add preprints as a valid collected_type for existing collections.
- Use the Collection - collected_types through table to remove preprints from collected_types.
| Python | apache-2.0 | CenterForOpenScience/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,adlius/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,felliott/osf.io,mattclark/osf.io,mfraezz/osf.io,felliott/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,saradbowman/osf.io,pattisdr/osf.io,baylee-d/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,adlius/osf.io,aaxelb/osf.io,adlius/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,mattclark/osf.io,adlius/osf.io,felliott/osf.io,cslzchen/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,cslzchen/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,aaxelb/osf.io | ---
+++
@@ -9,16 +9,20 @@
def reverse_func(state, schema):
preprint_content_type = ContentType.objects.get(app_label='osf', model='preprint')
- collections = Collection.objects.filter(collected_types__in=[preprint_content_type.id])
- for collection in collections:
- collection.collected_types.remove(preprint_content_type)
+ ThroughModel = Collection.collected_types.through
+ ThroughModel.objects.filter(contenttype_id=preprint_content_type.id).delete()
def add_preprint_type_to_collections(state, schema):
- preprint_content_type = ContentType.objects.get(app_label='osf', model='preprint')
- collections = Collection.objects.exclude(collected_types__in=[preprint_content_type.id])
+ ThroughModel = Collection.collected_types.through
+ preprint_ct_id = ContentType.objects.get(app_label='osf', model='preprint').id
+
+ through_objects = []
+ collections = Collection.objects.exclude(collected_types__in=[preprint_ct_id])
for collection in collections:
- collection.collected_types.add(preprint_content_type)
+ through_objects.append(ThroughModel(collection_id=collection.id, contenttype_id=preprint_ct_id))
+
+ ThroughModel.objects.bulk_create(through_objects)
class Migration(migrations.Migration): |
68b86aff3c9004b12dbd05cd1861229e73883e38 | quickstart/python/understand/example-2/create_joke_task.6.x.py | quickstart/python/understand/example-2/create_joke_task.6.x.py | # Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Create a new task named 'tell_a_joke'
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
task = client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks \
.create(unique_name='tell-a-joke')
# Provide actions for the new task
joke_actions = {
'actions': [
{'say': 'I was going to look for my missing watch, but I could never find the time.'}
]
}
# Update the tell-a-joke task to use this 'say' action.
client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks(task.sid) \
.task_actions().update(joke_actions)
print(task.sid)
| # Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Provide actions for the new task
joke_actions = {
'actions': [
{'say': 'I was going to look for my missing watch, but I could never find the time.'}
]
}
# Create a new task named 'tell_a_joke'
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
task = client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks \
.create(
unique_name='tell-a-joke',
task_actions=joke_actions)
print(task.sid)
| Update to include actions in Create | Update to include actions in Create | Python | mit | TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets | ---
+++
@@ -6,13 +6,6 @@
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
-# Create a new task named 'tell_a_joke'
-# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
-task = client.preview.understand \
- .assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
- .tasks \
- .create(unique_name='tell-a-joke')
-
# Provide actions for the new task
joke_actions = {
'actions': [
@@ -20,10 +13,13 @@
]
}
-# Update the tell-a-joke task to use this 'say' action.
-client.preview.understand \
- .assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
- .tasks(task.sid) \
- .task_actions().update(joke_actions)
+# Create a new task named 'tell_a_joke'
+# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
+task = client.preview.understand \
+ .assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
+ .tasks \
+ .create(
+ unique_name='tell-a-joke',
+ task_actions=joke_actions)
print(task.sid) |
4349e65c1c353f8808e139c57439a3dfe2e2846e | armstrong/core/arm_sections/views.py | armstrong/core/arm_sections/views.py | from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
| from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self):
return self.get_section()
def get_section(self):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
| Call get_section from get_object for backwards compatibility | Call get_section from get_object for backwards compatibility | Python | apache-2.0 | texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections | ---
+++
@@ -10,7 +10,10 @@
context_object_name = 'section'
model = Section
- def get_object(self, queryset=None):
+ def get_object(self):
+ return self.get_section()
+
+ def get_section(self):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
|
7e0030eb22671897a80633d57056ba0f26f15a77 | src/coordinators/models.py | src/coordinators/models.py | from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if (user.is_superuser
or not hasattr(user, 'coordinator')
or user.coordinator.is_manager):
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
# superusers and managers see everything
if (user.is_superuser
or hasattr(user, 'coordinator') and user.coordinator.is_manager):
return qs
# don't show anything to unconfigured users
if not hasattr(user, 'coordinator') or not user.coordinator.district:
return qs.none()
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| Make filter_by_district more strict - don't show anything to unconfigured users | Make filter_by_district more strict - don't show anything to unconfigured users
| Python | mit | mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign | ---
+++
@@ -13,10 +13,13 @@
blank=True, null=True)
def filter_by_district(qs, user, lookup):
+ # superusers and managers see everything
if (user.is_superuser
- or not hasattr(user, 'coordinator')
- or user.coordinator.is_manager):
+ or hasattr(user, 'coordinator') and user.coordinator.is_manager):
return qs
+ # don't show anything to unconfigured users
+ if not hasattr(user, 'coordinator') or not user.coordinator.district:
+ return qs.none()
kwargs = {
lookup: user.coordinator.district
} |
347038c528b07f2553f09daab6915828ab2a6113 | tests/__init__.py | tests/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from . import test_language, test_services, test_subliminal
import unittest
suite = unittest.TestSuite([test_language.suite(), test_services.suite(), test_subliminal.suite()])
if __name__ == '__main__':
unittest.TextTestRunner().run(suite)
| # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from . import test_language, test_services, test_subliminal, test_videos
import unittest
suite = unittest.TestSuite([test_language.suite(), test_services.suite(), test_subliminal.suite(), test_videos.suite()])
if __name__ == '__main__':
unittest.TextTestRunner().run(suite)
| Add test_videos to the main test suite | Add test_videos to the main test suite
| Python | mit | oxan/subliminal,t4lwh/subliminal,h3llrais3r/subliminal,getzze/subliminal,bogdal/subliminal,hpsbranco/subliminal,ravselj/subliminal,nvbn/subliminal,fernandog/subliminal,goll/subliminal,ratoaq2/subliminal,Elettronik/subliminal,neo1691/subliminal,juanmhidalgo/subliminal,SickRage/subliminal,Diaoul/subliminal,kbkailashbagaria/subliminal,ofir123/subliminal,pums974/subliminal | ---
+++
@@ -15,11 +15,11 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
-from . import test_language, test_services, test_subliminal
+from . import test_language, test_services, test_subliminal, test_videos
import unittest
-suite = unittest.TestSuite([test_language.suite(), test_services.suite(), test_subliminal.suite()])
+suite = unittest.TestSuite([test_language.suite(), test_services.suite(), test_subliminal.suite(), test_videos.suite()])
if __name__ == '__main__': |
a18e29b492cefaac64b2bd217a897aff7ebd8466 | tests/__init__.py | tests/__init__.py | # tests.__init__
import os
import os.path
import shutil
import tempfile
import yvs.shared as yvs
temp_dir = tempfile.gettempdir()
yvs.LOCAL_DATA_DIR_PATH = os.path.join(temp_dir, 'yvs-data')
yvs.LOCAL_CACHE_DIR_PATH = os.path.join(temp_dir, 'yvs-cache')
def set_up():
try:
os.mkdir(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
try:
os.mkdir(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
def tear_down():
try:
shutil.rmtree(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
try:
shutil.rmtree(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
| # tests.__init__
import os
import os.path
import shutil
import tempfile
import yvs.shared as yvs
from mock import patch
temp_dir = tempfile.gettempdir()
local_data_dir_patcher = patch(
'yvs.shared.LOCAL_DATA_DIR_PATH',
os.path.join(temp_dir, 'yvs-data'))
local_cache_dir_patcher = patch(
'yvs.shared.LOCAL_CACHE_DIR_PATH',
os.path.join(temp_dir, 'yvs-cache'))
def set_up():
local_data_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
local_cache_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
def tear_down():
try:
shutil.rmtree(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
local_cache_dir_patcher.stop()
try:
shutil.rmtree(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
local_data_dir_patcher.stop()
| Use patchers for overriding data/cache directories | Use patchers for overriding data/cache directories
| Python | mit | caleb531/youversion-suggest,caleb531/youversion-suggest | ---
+++
@@ -5,18 +5,25 @@
import shutil
import tempfile
import yvs.shared as yvs
+from mock import patch
temp_dir = tempfile.gettempdir()
-yvs.LOCAL_DATA_DIR_PATH = os.path.join(temp_dir, 'yvs-data')
-yvs.LOCAL_CACHE_DIR_PATH = os.path.join(temp_dir, 'yvs-cache')
+local_data_dir_patcher = patch(
+ 'yvs.shared.LOCAL_DATA_DIR_PATH',
+ os.path.join(temp_dir, 'yvs-data'))
+local_cache_dir_patcher = patch(
+ 'yvs.shared.LOCAL_CACHE_DIR_PATH',
+ os.path.join(temp_dir, 'yvs-cache'))
def set_up():
+ local_data_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
+ local_cache_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
@@ -28,7 +35,9 @@
shutil.rmtree(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
+ local_cache_dir_patcher.stop()
try:
shutil.rmtree(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
+ local_data_dir_patcher.stop() |
341dcac3331a21c1b747075ab73601cb08d4868d | compliance_checker/tests/helpers.py | compliance_checker/tests/helpers.py | from netCDF4 import Dataset
import tempfile
class MockNetCDF(Dataset):
"""
Wrapper object around NetCDF Dataset to write data only to memory.
"""
def __init__(self):
# taken from test/tst_diskless.py NetCDF library
# even though we aren't persisting data to disk, the constructor
# requires a filename not currently in use by another Dataset object..
tmp_filename = tempfile.NamedTemporaryFile(suffix='.nc',
delete=True).name
super(MockNetCDF, self).__init__(tmp_filename, 'w', diskless=True,
persist=False)
class MockTimeSeries(MockNetCDF):
"""
Mock time series with time dimension and time, lon, lat, and depth
variables defined
"""
def __init__(self):
super(MockTimeSeries, self).__init__()
self.createDimension('time', 500)
for v in ('time', 'lon', 'lat', 'depth'):
self.createVariable(v, 'd', ('time',))
class MockVariable(object):
'''
For mocking a dataset variable. Constructor optionally takes a NetCDF
variable, the NetCDF attributes of which will be copied over to this
object.
'''
def __init__(self, copy_var=None):
if copy_var is not None:
for att in copy_var.ncattrs():
setattr(self, att, getattr(copy_var, att))
| from netCDF4 import Dataset
import tempfile
class MockNetCDF(Dataset):
"""
Wrapper object around NetCDF Dataset to write data only to memory.
"""
def __init__(self):
# taken from test/tst_diskless.py NetCDF library
# even though we aren't persisting data to disk, the constructor
# requires a filename not currently in use by another Dataset object..
tmp_filename = tempfile.NamedTemporaryFile(suffix='.nc',
delete=True).name
super(MockNetCDF, self).__init__(tmp_filename, 'w', diskless=True,
persist=False)
class MockTimeSeries(MockNetCDF):
"""
Mock time series with time dimension and time, lon, lat, and depth
variables defined
"""
def __init__(self):
super(MockTimeSeries, self).__init__()
self.createDimension('time', 500)
for v in ('time', 'lon', 'lat', 'depth'):
self.createVariable(v, 'd', ('time',))
class MockVariable(object):
'''
For mocking a dataset variable. Constructor optionally takes a NetCDF
variable, the NetCDF attributes of which will be copied over to this
object.
'''
def __init__(self, copy_var=None):
if copy_var is not None:
self.name = copy_var.name
self.dimensions = copy_var.dimensions
for att in copy_var.ncattrs():
setattr(self, att, getattr(copy_var, att))
| Add name and dimensions attributes to MockVariable class | Add name and dimensions attributes to MockVariable class
| Python | apache-2.0 | DanielJMaher/compliance-checker,aodn/compliance-checker,ioos/compliance-checker,lukecampbell/compliance-checker,ocefpaf/compliance-checker | ---
+++
@@ -37,5 +37,7 @@
def __init__(self, copy_var=None):
if copy_var is not None:
+ self.name = copy_var.name
+ self.dimensions = copy_var.dimensions
for att in copy_var.ncattrs():
setattr(self, att, getattr(copy_var, att)) |
70d2a1cd5363b0a40cc488e9f12eca59bb307a0e | pyamf/adapters/_django_contrib_auth_models.py | pyamf/adapters/_django_contrib_auth_models.py | """
"""
from django.contrib.auth import models
import pyamf.adapters
models.User.__amf__ = {
'exclude': ('message_set', 'password'),
'readonly': ('username',)
}
# ensure that the adapter that we depend on is loaded ..
pyamf.adapters.get_adapter('django.db.models.base')
pyamf.register_package(models, models.__name__)
| """
"""
from django.contrib.auth import models
import pyamf.adapters
models.User.__amf__ = {
'exclude': ('message_set', 'password'),
'readonly': ('username',)
}
# ensure that the adapter that we depend on is loaded ..
pyamf.get_adapter('django.db.models.base')
pyamf.register_package(models, models.__name__)
| Use the new get_adapter api | Use the new get_adapter api
| Python | mit | thijstriemstra/pyamf,hydralabs/pyamf,hydralabs/pyamf,thijstriemstra/pyamf | ---
+++
@@ -12,6 +12,6 @@
}
# ensure that the adapter that we depend on is loaded ..
-pyamf.adapters.get_adapter('django.db.models.base')
+pyamf.get_adapter('django.db.models.base')
pyamf.register_package(models, models.__name__) |
7aa341cc37f480717a3b2c42a20f44b5d174e0eb | create_sample.py | create_sample.py | # importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
s = round(3214874 * 0.1)
i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)
# create a sample of train orders
s = round(131209 * 0.1)
j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)
# create a sample of prior order products
order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id')
order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv")
# create a sample of train order products
order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id')
order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv")
| # importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
s = round(3214874 * 0.01)
i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)
# create a sample of train orders
s = round(131209 * 0.01)
j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)
# create a sample of prior order products
order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id')
order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv")
# create a sample of train order products
order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id')
order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv")
| Reduce sample size for faster processing | fix: Reduce sample size for faster processing
| Python | mit | rjegankumar/instacart_prediction_model | ---
+++
@@ -5,12 +5,12 @@
# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
-s = round(3214874 * 0.1)
+s = round(3214874 * 0.01)
i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)
# create a sample of train orders
-s = round(131209 * 0.1)
+s = round(131209 * 0.01)
j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)
|
316066b2415861b65d540b822df1b2afea906207 | regulations/management/commands/setup_cors.py | regulations/management/commands/setup_cors.py | import boto3
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Set CORS rules on the Notice and Comment attachment bucket'
def handle(self, *args, **options):
session = boto3.Session(
aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID,
aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY,
)
s3 = session.client('s3')
s3.put_bucket_cors(
Bucket=settings.ATTACHMENT_BUCKET,
CORSConfiguration={
'CORSRules': [
{
'AllowedMethods': ['GET', 'PUT'],
'AllowedOrigins': settings.ALLOWED_HOSTS or ['*'],
'AllowedHeaders': ['*'],
},
],
},
)
| import boto3
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Set CORS rules on the Notice and Comment attachment bucket'
def handle(self, *args, **options):
hosts = settings.ALLOWED_HOSTS
origins = ['http://' + host for host in hosts]
origins = origins + ['https://' + host for host in hosts]
session = boto3.Session(
aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID,
aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY,
)
s3 = session.client('s3')
s3.put_bucket_cors(
Bucket=settings.ATTACHMENT_BUCKET,
CORSConfiguration={
'CORSRules': [
{
'AllowedMethods': ['GET', 'PUT'],
'AllowedOrigins': origins or ['*'],
'AllowedHeaders': ['*'],
},
],
},
)
| Add protocol to hosts for CORS | Add protocol to hosts for CORS
| Python | cc0-1.0 | 18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,eregs/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site | ---
+++
@@ -7,18 +7,23 @@
help = 'Set CORS rules on the Notice and Comment attachment bucket'
def handle(self, *args, **options):
+ hosts = settings.ALLOWED_HOSTS
+ origins = ['http://' + host for host in hosts]
+ origins = origins + ['https://' + host for host in hosts]
+
session = boto3.Session(
aws_access_key_id=settings.ATTACHMENT_ACCESS_KEY_ID,
aws_secret_access_key=settings.ATTACHMENT_SECRET_ACCESS_KEY,
)
s3 = session.client('s3')
+
s3.put_bucket_cors(
Bucket=settings.ATTACHMENT_BUCKET,
CORSConfiguration={
'CORSRules': [
{
'AllowedMethods': ['GET', 'PUT'],
- 'AllowedOrigins': settings.ALLOWED_HOSTS or ['*'],
+ 'AllowedOrigins': origins or ['*'],
'AllowedHeaders': ['*'],
},
], |
cdfa65efbb9c97f060a51aa5613a2788c437e0a1 | pal/services/__init__.py | pal/services/__init__.py | from pal.services.bonapp_service import BonAppService
from pal.services.dictionary_service import DictionaryService
from pal.services.directory_service import DirectoryService
from pal.services.joke_service import JokeService
from pal.services.movie_service import MovieService
from pal.services.service import wrap_response
from pal.services.ultralingua_service import UltraLinguaService
from pal.services.weather_service import WeatherService
from pal.services.facebook_service import FacebookService
from pal.services.yelp_service import YelpService
from pal.services.wa_service import WAService
_SERVICE_CLASSES = [
BonAppService,
DictionaryService,
DirectoryService,
JokeService,
FacebookService,
MovieService,
UltraLinguaService,
WAService,
WeatherService,
YelpService,
]
_SERVICES = {cls.short_name(): cls() for cls in _SERVICE_CLASSES}
@wrap_response
def no_response():
return ('ERROR', "Sorry, I'm not sure what you mean.")
def get_all_service_names():
return _SERVICES.keys()
def get_service_by_name(name):
if name in _SERVICES:
return _SERVICES[name]
| from pal.services.bonapp_service import BonAppService
from pal.services.dictionary_service import DictionaryService
from pal.services.directory_service import DirectoryService
from pal.services.joke_service import JokeService
from pal.services.movie_service import MovieService
from pal.services.service import wrap_response
from pal.services.ultralingua_service import UltraLinguaService
from pal.services.weather_service import WeatherService
from pal.services.facebook_service import FacebookService
from pal.services.yelp_service import YelpService
from pal.services.wa_service import WAService
_SERVICE_CLASSES = [
BonAppService,
DictionaryService,
DirectoryService,
FacebookService,
JokeService,
MovieService,
UltraLinguaService,
WAService,
WeatherService,
YelpService,
]
_SERVICES = {cls.short_name(): cls() for cls in _SERVICE_CLASSES}
@wrap_response
def no_response():
return ('ERROR', "Sorry, I'm not sure what you mean.")
def get_all_service_names():
return _SERVICES.keys()
def get_service_by_name(name):
if name in _SERVICES:
return _SERVICES[name]
| Put JokeService in the right place alphabetically | Put JokeService in the right place alphabetically
| Python | bsd-3-clause | Machyne/pal,Machyne/pal,Machyne/pal,Machyne/pal | ---
+++
@@ -15,8 +15,8 @@
BonAppService,
DictionaryService,
DirectoryService,
+ FacebookService,
JokeService,
- FacebookService,
MovieService,
UltraLinguaService,
WAService, |
6fe588ea915d65fdab00b53f883b0a72ef6cf564 | tests/test_apd.py | tests/test_apd.py | import json
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
def test_output_strips_email_spaces():
json_str = scraper(open(INPUT_FILE))
data = json.loads(json_str)
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"ronald.sanders@sfdph.org",
"juanita.alvarado@sfdph.org",
"joseph.calderon@sfdph.org",
]
assert offensive_field == expected
| import json
import os
import pytest
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
@pytest.fixture
def data():
json_str = scraper(open(INPUT_FILE))
artifact_dir = os.getenv('CIRCLE_ARTIFACTS')
if artifact_dir:
artifact_file = os.path.join(artifact_dir, 'apd.json')
open(artifact_file, 'w').write(json_str)
return json.loads(json_str)
def test_output_strips_email_spaces(data):
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"ronald.sanders@sfdph.org",
"juanita.alvarado@sfdph.org",
"joseph.calderon@sfdph.org",
]
assert offensive_field == expected
| Switch to pytest fixture and generate artifact for circle ci | Switch to pytest fixture and generate artifact for circle ci
| Python | mit | sfbrigade/sf-openreferral-datalib | ---
+++
@@ -1,14 +1,26 @@
import json
+import os
+
+import pytest
from sforparser.apd import scraper
INPUT_FILE = 'data/apd/input.txt'
-def test_output_strips_email_spaces():
+@pytest.fixture
+def data():
json_str = scraper(open(INPUT_FILE))
- data = json.loads(json_str)
+ artifact_dir = os.getenv('CIRCLE_ARTIFACTS')
+ if artifact_dir:
+ artifact_file = os.path.join(artifact_dir, 'apd.json')
+ open(artifact_file, 'w').write(json_str)
+
+ return json.loads(json_str)
+
+
+def test_output_strips_email_spaces(data):
offensive_field = data[70]["locations"][0]["emails"]
expected = [
"ronald.sanders@sfdph.org", |
c67864e50b92c38cbcc0e4e8ae630ff9e7194a55 | profiles/views.py | profiles/views.py | from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
| from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
return self.request.GET.get('next', reverse('profile'))
| Remove the exception handling code for getting the success url | Remove the exception handling code for getting the success url
| Python | bsd-2-clause | incuna/django-extensible-profiles | ---
+++
@@ -2,7 +2,6 @@
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
-from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
@@ -41,8 +40,5 @@
return self.request.user.profile
def get_success_url(self):
- try:
- return self.request.GET['next']
- except MultiValueDictKeyError:
- return reverse('profile')
+ return self.request.GET.get('next', reverse('profile'))
|
48902df1fb9b8b299155cd7e2f9a2bf3444abdc8 | awsom/__init__.py | awsom/__init__.py | #!/usr/bin/python
from awsom.entity import Entity, Factory
from awsom.config import AccountEntity, config
class ModelRootFactory(Factory):
def __init__(self, entity):
super(ModelRootFactory, self).__init__(entity)
def populate(self):
# Attach all configuration-defined accounts as children of the entity
for account in config.get_account_names():
self.entity._add_child(account, AccountEntity(parent=self.entity, **config.get_account(account)))
return True
class ModelRootEntity(Entity):
def __init__(self, name):
super(ModelRootEntity, self).__init__(factory=ModelRootFactory(self),name=name)
def add_account(self, name, **attrs):
self[name] = AccountEntity(name, **attrs)
# Upon import, the registered accounts should be loaded into the model root, so the
# recommended usage would be something like:
# from awsom import model as aws
# So you can do something like:
# aws.add_account('devel_ls', access_key_id=xxxx, secret_access_key=yyyy)
# and then:
# aws.devel_ls.ec2.instances
model = ModelRootEntity(name='model')
| #!/usr/bin/python
from awsom.entity import Entity, Factory
from awsom.config import AccountEntity, config
class ModelRootFactory(Factory):
def __init__(self, entity):
super(ModelRootFactory, self).__init__(entity)
def populate(self):
# Attach all configuration-defined accounts as children of the entity
for account in config.get_account_names():
self.entity._add_child(account, AccountEntity(parent=self.entity, **config.get_account(account)))
return True
class ModelRootEntity(Entity):
def __init__(self, name):
super(ModelRootEntity, self).__init__(factory=ModelRootFactory(self),name=name)
def add_account(self, name, **attrs):
self._add_child(name, AccountEntity(parent=self, name=name, **attrs))
# Upon import, the registered accounts should be loaded into the model root, so the
# recommended usage would be something like:
# from awsom import model as aws
# So you can do something like:
# aws.add_account('devel_ls', access_key_id=xxxx, secret_access_key=yyyy)
# and then:
# aws.devel_ls.ec2.instances
model = ModelRootEntity(name='model')
| Fix bug with account additions using old Entity API | Fix bug with account additions using old Entity API
| Python | mit | tuxpiper/awsom | ---
+++
@@ -15,7 +15,7 @@
def __init__(self, name):
super(ModelRootEntity, self).__init__(factory=ModelRootFactory(self),name=name)
def add_account(self, name, **attrs):
- self[name] = AccountEntity(name, **attrs)
+ self._add_child(name, AccountEntity(parent=self, name=name, **attrs))
# Upon import, the registered accounts should be loaded into the model root, so the
# recommended usage would be something like: |
dff2b0cb2b425217435deaa7c33d54f168f1a9d7 | playground/testing.py | playground/testing.py | import numpy as np
import matplotlib.pyplot as plt
def takeFFT(data):
data = data / np.linalg.norm(data)
data_squared = np.square(data)
fft_out = np.fft.fft(data_squared)
fft_shape = np.fft.fftfreq(data_squared.shape[-1])
plt.stem(fft_shape, fft_out)
#plt.stem(fft_shape, np.fft.fftshift(fft_out))
def showData(data):
plt.plot(data.real)
plt.plot(data.imag)
def showStar(data):
plt.figure()
plt.plot(data.real, data.imag, '.')
def fixFreq(data, freq, hbar):
for i, val in enumerate(data):
data[i] = val*np.exp( (0 - 1j)*freq*i ) / hbar
return data
if __name__ == "__main__":
data = np.fromfile('new_received_trimmed.bin', dtype=np.complex64)
takeFFT(data)
#data = fixFreq(data, 0.007333 * np.pi, np.sqrt(0.68188))
#showData(data)
showStar(data)
plt.show()
| import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def takeFFT(data):
data = data / np.linalg.norm(data)
data_squared = np.square(data)
fft_out = np.fft.fft(data_squared)
fft_shape = np.fft.fftfreq(data_squared.shape[-1])
a = np.absolute(fft_out).argmax()
print(fft_out[a], fft_shape[a])
#plt.stem(fft_shape, fft_out)
#plt.stem(fft_shape, np.fft.fftshift(fft_out))
def showData(data):
plt.plot(data.real, label="real")
plt.plot(data.imag, label="imag")
plt.legend()
def showStar(data):
plt.figure()
plt.plot(data.real, data.imag, '.')
plt.xlabel("Real")
plt.ylabel("Imaginary")
def fixFreq(data, freq, hbar):
for i, val in enumerate(data):
data[i] = val*np.exp( (0 - 1j)*freq*i ) / (-hbar)
return data
if __name__ == "__main__":
data = np.fromfile('new_received_trimmed.bin', dtype=np.complex64)
takeFFT(data)
data = fixFreq(data, -0.00275 * np.pi, -np.sqrt(0.024059 + 0.81616j) )
#data = fixFreq(data, -0.00275/2, np.sqrt(0.68188))
showData(data)
showStar(data)
plt.show()
| Fix frequency and phase offset | Fix frequency and phase offset
| Python | mit | williamalu/mimo_usrp | ---
+++
@@ -1,5 +1,6 @@
import numpy as np
import matplotlib.pyplot as plt
+import seaborn as sns
def takeFFT(data):
data = data / np.linalg.norm(data)
@@ -7,22 +8,28 @@
fft_out = np.fft.fft(data_squared)
fft_shape = np.fft.fftfreq(data_squared.shape[-1])
- plt.stem(fft_shape, fft_out)
+ a = np.absolute(fft_out).argmax()
+ print(fft_out[a], fft_shape[a])
+
+ #plt.stem(fft_shape, fft_out)
#plt.stem(fft_shape, np.fft.fftshift(fft_out))
def showData(data):
- plt.plot(data.real)
- plt.plot(data.imag)
+ plt.plot(data.real, label="real")
+ plt.plot(data.imag, label="imag")
+ plt.legend()
def showStar(data):
plt.figure()
plt.plot(data.real, data.imag, '.')
+ plt.xlabel("Real")
+ plt.ylabel("Imaginary")
def fixFreq(data, freq, hbar):
for i, val in enumerate(data):
- data[i] = val*np.exp( (0 - 1j)*freq*i ) / hbar
+ data[i] = val*np.exp( (0 - 1j)*freq*i ) / (-hbar)
return data
@@ -31,8 +38,9 @@
data = np.fromfile('new_received_trimmed.bin', dtype=np.complex64)
takeFFT(data)
- #data = fixFreq(data, 0.007333 * np.pi, np.sqrt(0.68188))
- #showData(data)
+ data = fixFreq(data, -0.00275 * np.pi, -np.sqrt(0.024059 + 0.81616j) )
+ #data = fixFreq(data, -0.00275/2, np.sqrt(0.68188))
+ showData(data)
showStar(data)
plt.show() |
4c23e08172f3a1dfc64e32fce53f8f7188a0bf0c | pubsubpull/api.py | pubsubpull/api.py | """
APIs exposed by pubsubpull.
"""
from __future__ import absolute_import
from async.api import schedule
from django.db import connection
from pubsubpull import _join_with_project_path
def add_trigger_function():
"""Used for older versions of Postres, or test runs where there are no
migrations.
"""
cursor = connection.cursor()
sql = file(_join_with_project_path("trigger-function.sql")).read()
cursor.execute(sql)
def change_detect(model):
"""Enable change detection on the requested model.
"""
cursor = connection.cursor()
sql = file(_join_with_project_path("trigger-attach.sql")).read()
sql = sql.format(db_table=model._meta.db_table)
cursor.execute(sql)
def pull(model, callback, **kwargs):
"""Start a job pulling data from one service to this one.
"""
schedule('pubsubpull.async.pull_monitor',
args=[model, callback], kwargs=kwargs)
| """
APIs exposed by pubsubpull.
"""
from __future__ import absolute_import
from async.api import schedule
from django.db import connection
from pubsubpull import _join_with_project_path
def add_trigger_function():
"""Used for older versions of Postres, or test runs where there are no
migrations.
"""
cursor = connection.cursor()
sql = file(_join_with_project_path("trigger-function.sql")).read()
cursor.execute(sql)
def change_detect(model):
"""Enable change detection on the requested model.
"""
cursor = connection.cursor()
sql = file(_join_with_project_path("trigger-attach.sql")).read()
sql = sql.format(db_table=model._meta.db_table)
cursor.execute(sql)
return sql
def pull(model, callback, **kwargs):
"""Start a job pulling data from one service to this one.
"""
schedule('pubsubpull.async.pull_monitor',
args=[model, callback], kwargs=kwargs)
| Return the SQL we've just tried to run | Return the SQL we've just tried to run
| Python | mit | KayEss/django-pubsubpull,KayEss/django-pubsubpull,KayEss/django-pubsubpull | ---
+++
@@ -25,6 +25,7 @@
sql = file(_join_with_project_path("trigger-attach.sql")).read()
sql = sql.format(db_table=model._meta.db_table)
cursor.execute(sql)
+ return sql
def pull(model, callback, **kwargs): |
c39c362e949a7d89f92207d0b26bc9f6d61eacae | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
print cell
cellid = AdminConfig.getid( )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | Create script to save documentation to a file | 4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4 | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ---
+++
@@ -15,8 +15,7 @@
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
-print cell
-cellid = AdminConfig.getid( )
+cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs: |
c825373971d0ad24e4ca71fd4da88e55e62a6b17 | productmd/__init__.py | productmd/__init__.py | # -*- coding: utf-8 -*-
# Copyright (C) 2015 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
from .compose import Compose # noqa
from .composeinfo import ComposeInfo # noqa
from .discinfo import DiscInfo # noqa
from .images import Images # noqa
from .rpms import Rpms # noqa
from .treeinfo import TreeInfo # noqa
| Allow importing major classes directly from productmd | Allow importing major classes directly from productmd
This should simplify things for most users: just import productmd module
and Compose (the one for metadata loading), Rpms and Images classes are
directly available as well as ComposeInfo, DiscInfo and TreeInfo.
| Python | lgpl-2.1 | release-engineering/productmd,lubomir/productmd,release-engineering/productmd,lubomir/productmd | ---
+++
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2015 Red Hat, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program. If not, see
+# <http://www.gnu.org/licenses/>.
+
+from .compose import Compose # noqa
+from .composeinfo import ComposeInfo # noqa
+from .discinfo import DiscInfo # noqa
+from .images import Images # noqa
+from .rpms import Rpms # noqa
+from .treeinfo import TreeInfo # noqa | |
ceda6bd7388df744e3ffd93118919ae67c55fe97 | unittest-build.py | unittest-build.py | #!/usr/bin/python
import sys
import os
if sys.platform == 'win32':
os.system ( 'devenv /build libSequantoAutomation.sln' )
else:
os.system ( 'make' )
| #!/usr/bin/python
import sys
import os
if sys.platform == 'win32':
os.system ( 'devenv libSequantoAutomation.sln /build Debug' )
else:
os.system ( 'make' )
| Fix devenv call in unittesting script. | Fix devenv call in unittesting script.
| Python | apache-2.0 | seqzap/sequanto-automation,seqzap/sequanto-automation,rasmus-toftdahl-olesen/sequanto-automation,rasmus-toftdahl-olesen/sequanto-automation,micronpn/sequanto-automation,micronpn/sequanto-automation,rasmus-toftdahl-olesen/sequanto-automation,seqzap/sequanto-automation,seqzap/sequanto-automation,seqzap/sequanto-automation,rasmus-toftdahl-olesen/sequanto-automation,micronpn/sequanto-automation,rasmus-toftdahl-olesen/sequanto-automation,micronpn/sequanto-automation,micronpn/sequanto-automation | ---
+++
@@ -4,6 +4,6 @@
import os
if sys.platform == 'win32':
- os.system ( 'devenv /build libSequantoAutomation.sln' )
+ os.system ( 'devenv libSequantoAutomation.sln /build Debug' )
else:
os.system ( 'make' ) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.