commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
f97d30c16de2b0e57a3ffb96c803bcafc3b0dca7
|
emission/core/wrapper/modestattimesummary.py
|
emission/core/wrapper/modestattimesummary.py
|
import logging
import emission.core.wrapper.wrapperbase as ecwb
import emission.core.wrapper.motionactivity as ecwm
# Used for various metrics such as count, distance, mean speed calorie consumption,
# median speed calorie consumption
# Should come later: carbon footprint
# Optimal doesn't look like it fits this, because it is not per mode
class ModeStatTimeSummary(ecwb.WrapperBase):
# We will end up with props like
# {
# MotionTypes.IN_VEHICLE: ecwb.WrapperBase.Access.WORM
# }
# Each distance will have
#
#
props = dict([(t.name, ecwb.WrapperBase.Access.WORM) for t in ecwm.MotionTypes])
props.update(
{'ts': ecwb.WrapperBase.Access.WORM, # YYYY-MM-DD
'local_dt': ecwb.WrapperBase.Access.WORM,
'fmt_time': ecwb.WrapperBase.Access.WORM} # YYYY-MM-DD
)
enums = {}
geojson = []
nullable = []
local_dates = ['end_local_dt']
def _populateDependencies(self):
pass
|
import logging
import emission.core.wrapper.wrapperbase as ecwb
import emission.core.wrapper.motionactivity as ecwm
# Used for various metrics such as count, distance, mean speed calorie consumption,
# median speed calorie consumption
# Should come later: carbon footprint
# Optimal doesn't look like it fits this, because it is not per mode
class ModeStatTimeSummary(ecwb.WrapperBase):
# We will end up with props like
# {
# MotionTypes.IN_VEHICLE: ecwb.WrapperBase.Access.WORM
# }
# Each distance will have
#
#
props = dict([(t.name, ecwb.WrapperBase.Access.WORM) for t in ecwm.MotionTypes])
props.update(
{'ts': ecwb.WrapperBase.Access.WORM, # YYYY-MM-DD
'local_dt': ecwb.WrapperBase.Access.WORM,
'fmt_time': ecwb.WrapperBase.Access.WORM} # YYYY-MM-DD
)
enums = {}
geojson = []
nullable = []
local_dates = ['local_dt']
def _populateDependencies(self):
pass
|
Fix the list of local_date objects in the ModeStatTimeSummary
|
Fix the list of local_date objects in the ModeStatTimeSummary
|
Python
|
bsd-3-clause
|
yw374cornell/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server
|
---
+++
@@ -25,7 +25,7 @@
enums = {}
geojson = []
nullable = []
- local_dates = ['end_local_dt']
+ local_dates = ['local_dt']
def _populateDependencies(self):
pass
|
e4a3d3c273a1b7e26e9fdcf7f8da060bf127f27e
|
examples/django_project/django_project/tests.py
|
examples/django_project/django_project/tests.py
|
import unittest
from datetime import datetime
from snapshottest.django import TestCase
def api_client_get(url):
return {
'url': url,
}
class TestDemo(TestCase):
def test_api_me(self):
now = datetime.now().isoformat()
my_api_response = api_client_get('/' + now)
self.assertMatchSnapshot(my_api_response)
if __name__ == '__main__':
unittest.main()
|
import unittest
from datetime import datetime
from snapshottest.django import TestCase
def api_client_get(url):
return {
'url': url,
}
class TestDemo(TestCase):
def test_api_me(self):
# Note this tests should fail unless the snapshot-update command line
# option is specified. Run `python manage.py test --snapshot-update`.
now = datetime.now().isoformat()
my_api_response = api_client_get('/' + now)
self.assertMatchSnapshot(my_api_response)
if __name__ == '__main__':
unittest.main()
|
Add note for test_api_me django-project test
|
Add note for test_api_me django-project test
|
Python
|
mit
|
syrusakbary/snapshottest
|
---
+++
@@ -13,6 +13,8 @@
class TestDemo(TestCase):
def test_api_me(self):
+ # Note this tests should fail unless the snapshot-update command line
+ # option is specified. Run `python manage.py test --snapshot-update`.
now = datetime.now().isoformat()
my_api_response = api_client_get('/' + now)
self.assertMatchSnapshot(my_api_response)
|
c1d906da71571552de92f028c91b3fc2ec2d28de
|
test/unit/registration/test_ants.py
|
test/unit/registration/test_ants.py
|
from nose.tools import *
import os, shutil
import logging
logger = logging.getLogger(__name__)
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from qipipe.registration import ants
# The test parent directory.
ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
# The test fixture.
FIXTURE = os.path.join(ROOT, 'fixtures', 'registration', 'breast', 'subject03', 'session01')
# The test results.
RESULT = os.path.join(ROOT, 'results', 'registration', 'ants')
WORK = os.path.join(RESULT, 'work')
OUTPUT = os.path.join(RESULT, 'registered')
class TestANTS:
"""ANTS registration unit tests."""
def setup(self):
shutil.rmtree(RESULT, True)
def teardown(self):
shutil.rmtree(RESULT, True)
def test_registration(self):
rdict = ants.register(FIXTURE, output=OUTPUT, work=WORK)
# Verify that each input is registered.
for fn in os.listdir(FIXTURE):
f = os.path.join(FIXTURE, fn)
rfn = fn.replace('.dcm', 'Registered.nii.gz')
assert_equal(rfn, rdict[fn], "Missing registration mapping: %s" % rfn)
if __name__ == "__main__":
import nose
nose.main(defaultTest=__name__)
|
from nose.tools import *
import os, shutil
import logging
logger = logging.getLogger(__name__)
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from qipipe.registration import ants
# The test parent directory.
ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
# The test fixture.
FIXTURE = os.path.join(ROOT, 'fixtures', 'registration', 'breast', 'Breast03', 'Session01')
# The test results.
RESULT = os.path.join(ROOT, 'results', 'registration', 'ants')
WORK = os.path.join(RESULT, 'work')
OUTPUT = os.path.join(RESULT, 'registered')
class TestANTS:
"""ANTS registration unit tests."""
def setup(self):
shutil.rmtree(RESULT, True)
def teardown(self):
shutil.rmtree(RESULT, True)
def test_registration(self):
rdict = ants.register(FIXTURE, output=OUTPUT, work=WORK)
# Verify that each input is registered.
for fn in os.listdir(FIXTURE):
f = os.path.join(FIXTURE, fn)
rfn = fn.replace('.dcm', 'Registered.nii.gz')
assert_equal(rfn, rdict[fn], "Missing registration mapping: %s" % rfn)
if __name__ == "__main__":
import nose
nose.main(defaultTest=__name__)
|
Standardize the fixture directory names.
|
Standardize the fixture directory names.
|
Python
|
bsd-2-clause
|
ohsu-qin/qipipe
|
---
+++
@@ -11,7 +11,7 @@
# The test parent directory.
ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
# The test fixture.
-FIXTURE = os.path.join(ROOT, 'fixtures', 'registration', 'breast', 'subject03', 'session01')
+FIXTURE = os.path.join(ROOT, 'fixtures', 'registration', 'breast', 'Breast03', 'Session01')
# The test results.
RESULT = os.path.join(ROOT, 'results', 'registration', 'ants')
WORK = os.path.join(RESULT, 'work')
|
857ff014ea7e889f75f741b191d82fadb09cd48d
|
integration_tests/telecommand/program_upload.py
|
integration_tests/telecommand/program_upload.py
|
import struct
from telecommand import Telecommand
class EraseBootTableEntry(Telecommand):
def apid(self):
return 0xB0
def payload(self):
mask = 0
for e in self._entries:
mask |= 1 << e
return [mask]
def __init__(self, entries):
self._entries = entries
class WriteProgramPart(Telecommand):
MAX_PART_SIZE = Telecommand.MAX_PAYLOAD_SIZE - 3
def apid(self):
return 0xB1
def payload(self):
mask = 0
for e in self._entries:
mask |= 1 << e
return list(struct.pack('<BI', mask, self._offset)) + list(self._content)
def __init__(self, entries, offset, content):
self._offset = offset
self._content = content
self._entries = entries
class FinalizeProgramEntry(Telecommand):
def apid(self):
return 0xB2
def payload(self):
mask = 0
for e in self._entries:
mask |= 1 << e
return list(struct.pack('<BIH', mask, self._length, self._expected_crc)) + list(self._name)
def __init__(self, entries, length, expected_crc, name):
self._entries = entries
self._length = length
self._expected_crc = expected_crc
self._name = name
|
import struct
from telecommand import Telecommand
class EraseBootTableEntry(Telecommand):
def apid(self):
return 0xB0
def payload(self):
mask = 0
for e in self._entries:
mask |= 1 << e
return [mask]
def __init__(self, entries):
self._entries = entries
class WriteProgramPart(Telecommand):
MAX_PART_SIZE = Telecommand.MAX_PAYLOAD_SIZE - 5
def apid(self):
return 0xB1
def payload(self):
mask = 0
for e in self._entries:
mask |= 1 << e
return list(struct.pack('<BI', mask, self._offset)) + list(self._content)
def __init__(self, entries, offset, content):
self._offset = offset
self._content = content
self._entries = entries
class FinalizeProgramEntry(Telecommand):
def apid(self):
return 0xB2
def payload(self):
mask = 0
for e in self._entries:
mask |= 1 << e
return list(struct.pack('<BIH', mask, self._length, self._expected_crc)) + list(self._name)
def __init__(self, entries, length, expected_crc, name):
self._entries = entries
self._length = length
self._expected_crc = expected_crc
self._name = name
|
Fix write program telecommand part size
|
Fix write program telecommand part size
|
Python
|
agpl-3.0
|
PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC
|
---
+++
@@ -19,7 +19,7 @@
class WriteProgramPart(Telecommand):
- MAX_PART_SIZE = Telecommand.MAX_PAYLOAD_SIZE - 3
+ MAX_PART_SIZE = Telecommand.MAX_PAYLOAD_SIZE - 5
def apid(self):
return 0xB1
|
c43fbdf1c0a4e976297f9fa75f97a2cfe4a0bee3
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oauth = OAuth()
lm = LoginManager()
lm.login_view = "views.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oauth.init_app(app)
from app.views import views
app.register_blueprint(views)
return app
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "views.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views import views
app.register_blueprint(views)
return app
|
Rename OAuth var to avoid naming conflicts
|
Rename OAuth var to avoid naming conflicts
|
Python
|
mit
|
Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary
|
---
+++
@@ -6,7 +6,7 @@
from config import config
db = SQLAlchemy()
-oauth = OAuth()
+oa = OAuth()
lm = LoginManager()
lm.login_view = "views.login"
@@ -23,7 +23,7 @@
db.init_app(app)
lm.init_app(app)
- oauth.init_app(app)
+ oa.init_app(app)
from app.views import views
app.register_blueprint(views)
|
18a133cc3b56fdc1a5185bf3da3bed001e1b5c64
|
dictionary.py
|
dictionary.py
|
# Import non-standard modules
import json
def definition(word):
'''This function returns the available definitions(s) of the input'''
return data[word]
# Load dictionary data from data.json to python dictionary
data = json.load(open('data.json', 'r'))
while True:
ip = input("Enter word:")
if ip in {'!q', '!Q'}:
break
elif data.__contains__(ip):
print(definition(ip))
else:
print("Please enter a valid word! \nEnter '!q' to quit!!!\n")
|
# Import non-standard modules
import json
from difflib import get_close_matches
def definition(word):
'''This function returns the available definitions(s) of the input'''
return data[word]
# Load dictionary data from data.json to python dictionary
data = json.load(open('data.json', 'r'))
# Infinite loop for processing
while True:
# Accept case-insensitive input from user
ip = str(input("Enter word:")).lower()
# Exit from program - user choice
if ip == '!q':
break
# Check dictionary for definition
elif data.__contains__(ip):
print(definition(ip))
# If exact definition is not found, provide suggestion
elif len(get_close_matches(ip, data.keys(), cutoff=0.8)) > 0:
print("Did you mean to type",
get_close_matches(ip, data.keys(), cutoff=0.8)[0], "?(y/n):")
choice = str(input()).lower()
# Provide output if generated suggestion is accepted
if choice == 'y':
ip = get_close_matches(ip, data.keys(), cutoff=0.8)[0]
print(definition(ip))
# No suggestion or definition found
else:
print("No such word exists!! \nEnter '!q' to quit!!!")
|
Add feature to process user input and provide automatic suggestions
|
Add feature to process user input and provide automatic suggestions
|
Python
|
mit
|
jojenreed/Python-CLI-Dictionary
|
---
+++
@@ -1,5 +1,6 @@
# Import non-standard modules
import json
+from difflib import get_close_matches
def definition(word):
@@ -10,12 +11,25 @@
# Load dictionary data from data.json to python dictionary
data = json.load(open('data.json', 'r'))
-
+# Infinite loop for processing
while True:
- ip = input("Enter word:")
- if ip in {'!q', '!Q'}:
+ # Accept case-insensitive input from user
+ ip = str(input("Enter word:")).lower()
+ # Exit from program - user choice
+ if ip == '!q':
break
+ # Check dictionary for definition
elif data.__contains__(ip):
print(definition(ip))
+ # If exact definition is not found, provide suggestion
+ elif len(get_close_matches(ip, data.keys(), cutoff=0.8)) > 0:
+ print("Did you mean to type",
+ get_close_matches(ip, data.keys(), cutoff=0.8)[0], "?(y/n):")
+ choice = str(input()).lower()
+ # Provide output if generated suggestion is accepted
+ if choice == 'y':
+ ip = get_close_matches(ip, data.keys(), cutoff=0.8)[0]
+ print(definition(ip))
+ # No suggestion or definition found
else:
- print("Please enter a valid word! \nEnter '!q' to quit!!!\n")
+ print("No such word exists!! \nEnter '!q' to quit!!!")
|
cc838a311e891294da10ca7465782d71e622b5ef
|
dodge.py
|
dodge.py
|
import platform
class OSXDodger(object):
allowed_version = "10.12.1"
def __init__(self, applications_dir):
self.app_dir = applications_dir
def load_applications(self):
"""
Read all applications in the `/Applications/` dir
"""
pass
def select_applications(self):
"""
Allow user to select an application they want
not to appear on the Dock
"""
pass
def load_dodger_filer(self):
"""
Load the file to modify for the application
chosen by the user in `select_applications`
The file to be loaded for is `info.plist`
"""
pass
def dodge_application(self):
"""
Remive the application from the Dock
"""
pass
@classmethod
def pc_is_macintosh(cls):
"""
Check if it is an `Apple Computer` i.e a Mac
@return bool
"""
system = platform.system().lower()
sys_version = int((platform.mac_ver())[0].replace(".", ""))
allowed_version = int(cls.allowed_version.replace(".", ""))
if (system == "darwin") and (sys_version >= allowed_version):
return True
else:
print("\nSorry :(")
print("FAILED. OsX-dock-dodger is only applicable to computers " +
"running OS X {} or higher".format(cls.allowed_version))
return False
dodge = OSXDodger("/Applications/")
dodge.pc_is_macintosh()
|
import platform
class OSXDodger(object):
allowed_version = "10.12.1"
def __init__(self, applications_dir):
self.app_dir = applications_dir
def load_applications(self):
"""
Read all applications in the `/Applications/` dir
"""
self.pc_is_macintosh()
def select_applications(self):
"""
Allow user to select an application they want
not to appear on the Dock
"""
pass
def load_dodger_filer(self):
"""
Load the file to modify for the application
chosen by the user in `select_applications`
The file to be loaded for is `info.plist`
"""
pass
def dodge_application(self):
"""
Remive the application from the Dock
"""
pass
@classmethod
def pc_is_macintosh(cls):
"""
Check if it is an `Apple Computer` i.e a Mac
@return bool
"""
system = platform.system().lower()
sys_version = int((platform.mac_ver())[0].replace(".", ""))
allowed_version = int(cls.allowed_version.replace(".", ""))
if (system == "darwin") and (sys_version >= allowed_version):
return True
else:
print("\nSorry :(")
print("FAILED. OsX-dock-dodger is only applicable to computers " +
"running OS X {} or higher".format(cls.allowed_version))
return False
dodge = OSXDodger("/Applications/")
dodge.load_applications()
|
Add class method to check if PC is a Mac
|
Add class method to check if PC is a Mac
|
Python
|
mit
|
yoda-yoda/osx-dock-dodger,denisKaranja/osx-dock-dodger
|
---
+++
@@ -11,7 +11,7 @@
"""
Read all applications in the `/Applications/` dir
"""
- pass
+ self.pc_is_macintosh()
def select_applications(self):
"""
@@ -54,4 +54,4 @@
return False
dodge = OSXDodger("/Applications/")
-dodge.pc_is_macintosh()
+dodge.load_applications()
|
967f9363872ff64847e4b93a7c1ea75869eaabd9
|
benchmarks/benchmarks.py
|
benchmarks/benchmarks.py
|
# Write the benchmarking functions here.
# See "Writing benchmarks" in the asv docs for more information.
import os.path as osp
import numpy as np
from tempfile import TemporaryDirectory
import h5py
class TimeSuite:
"""
An example benchmark that times the performance of various kinds
of iterating over dictionaries in Python.
"""
def setup(self):
self._td = TemporaryDirectory()
path = osp.join(self._td.name, 'test.h5')
with h5py.File(path, 'w') as f:
f['a'] = np.arange(100000)
self.f = h5py.File(path, 'r')
def teardown(self):
self.f.close()
self._td.cleanup()
def time_many_small_reads(self):
ds = self.f['a']
for i in range(10000):
arr = ds[i * 10:(i + 1) * 10]
|
# Write the benchmarking functions here.
# See "Writing benchmarks" in the asv docs for more information.
import os.path as osp
import numpy as np
from tempfile import TemporaryDirectory
import h5py
class TimeSuite:
"""
An example benchmark that times the performance of various kinds
of iterating over dictionaries in Python.
"""
def setup(self):
self._td = TemporaryDirectory()
path = osp.join(self._td.name, 'test.h5')
with h5py.File(path, 'w') as f:
f['a'] = np.arange(100000)
self.f = h5py.File(path, 'r')
def teardown(self):
self.f.close()
self._td.cleanup()
def time_many_small_reads(self):
ds = self.f['a']
for i in range(10000):
arr = ds[i * 10:(i + 1) * 10]
class WritingTimeSuite:
"""Based on example in GitHub issue 492:
https://github.com/h5py/h5py/issues/492
"""
def setup(self):
self._td = TemporaryDirectory()
path = osp.join(self._td.name, 'test.h5')
self.f = h5py.File(path, 'w')
self.shape = shape = (128, 1024, 512)
self.f.create_dataset(
'a', shape=shape, dtype=np.float32, chunks=(1, shape[1], 64)
)
def teardown(self):
self.f.close()
self._td.cleanup()
def time_write_index_last_axis(self):
ds = self.f['a']
data = np.zeros(self.shape[:2])
for i in range(self.shape[2]):
ds[..., i] = data
def time_write_slice_last_axis(self):
ds = self.f['a']
data = np.zeros(self.shape[:2])
for i in range(self.shape[2]):
ds[..., i:i+1] = data[..., np.newaxis]
|
Add benchmark for writing with index of last dimension
|
Add benchmark for writing with index of last dimension
|
Python
|
bsd-3-clause
|
h5py/h5py,h5py/h5py,h5py/h5py
|
---
+++
@@ -26,3 +26,32 @@
ds = self.f['a']
for i in range(10000):
arr = ds[i * 10:(i + 1) * 10]
+
+class WritingTimeSuite:
+ """Based on example in GitHub issue 492:
+ https://github.com/h5py/h5py/issues/492
+ """
+ def setup(self):
+ self._td = TemporaryDirectory()
+ path = osp.join(self._td.name, 'test.h5')
+ self.f = h5py.File(path, 'w')
+ self.shape = shape = (128, 1024, 512)
+ self.f.create_dataset(
+ 'a', shape=shape, dtype=np.float32, chunks=(1, shape[1], 64)
+ )
+
+ def teardown(self):
+ self.f.close()
+ self._td.cleanup()
+
+ def time_write_index_last_axis(self):
+ ds = self.f['a']
+ data = np.zeros(self.shape[:2])
+ for i in range(self.shape[2]):
+ ds[..., i] = data
+
+ def time_write_slice_last_axis(self):
+ ds = self.f['a']
+ data = np.zeros(self.shape[:2])
+ for i in range(self.shape[2]):
+ ds[..., i:i+1] = data[..., np.newaxis]
|
3b0432edc1a9e55e6e7da5955a1ecbeea8000371
|
openpassword/keychain.py
|
openpassword/keychain.py
|
from openpassword.exceptions import InvalidPasswordException
class Keychain:
def __init__(self, encryption_key):
self.encryption_key = encryption_key
self._locked = True
def unlock(self, password):
try:
self.encryption_key.decrypt(password)
self._locked = False
except InvalidPasswordException as e:
self._locked = True
raise(e)
def lock(self):
self._locked = True
def is_locked(self):
return self._locked
|
from openpassword.exceptions import InvalidPasswordException
class Keychain:
def __init__(self, encryption_key):
self.encryption_key = encryption_key
self._locked = True
def unlock(self, password):
try:
self.encryption_key.decrypt(password)
self._locked = False
except InvalidPasswordException as e:
self._locked = True
raise e
def lock(self):
self._locked = True
def is_locked(self):
return self._locked
|
Remove unecessary parentesis from raise call
|
Remove unecessary parentesis from raise call
|
Python
|
mit
|
openpassword/blimey,openpassword/blimey
|
---
+++
@@ -13,7 +13,7 @@
self._locked = False
except InvalidPasswordException as e:
self._locked = True
- raise(e)
+ raise e
def lock(self):
self._locked = True
|
7613fc5c3ef1cc552d39e5fde533e8f9af7cc931
|
form_designer/tests/test_cms_plugin.py
|
form_designer/tests/test_cms_plugin.py
|
import django
from django.contrib.auth.models import AnonymousUser
from django.utils.crypto import get_random_string
import pytest
from cms import api
from cms.page_rendering import render_page
from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin
from form_designer.models import FormDefinition, FormDefinitionField
@pytest.mark.django_db
def test_cms_plugin_renders_in_cms_page(rf):
if django.VERSION >= (1, 10):
pytest.xfail('This test is broken in Django 1.10+')
fd = FormDefinition.objects.create(
mail_to='test@example.com',
mail_subject='Someone sent you a greeting: {{ test }}'
)
field = FormDefinitionField.objects.create(
form_definition=fd,
name='test',
label=get_random_string(),
field_class='django.forms.CharField',
)
page = api.create_page("test", "page.html", "en")
ph = page.get_placeholders()[0]
api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd)
request = rf.get("/")
request.user = AnonymousUser()
request.current_page = page
response = render_page(request, page, "fi", "test")
response.render()
content = response.content.decode("utf8")
assert field.label in content
assert "<form" in content
|
from django.contrib.auth.models import AnonymousUser
from django.utils.crypto import get_random_string
import pytest
from cms import api
from cms.page_rendering import render_page
from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin
from form_designer.models import FormDefinition, FormDefinitionField
@pytest.mark.django_db
def test_cms_plugin_renders_in_cms_page(rf):
fd = FormDefinition.objects.create(
mail_to='test@example.com',
mail_subject='Someone sent you a greeting: {{ test }}'
)
field = FormDefinitionField.objects.create(
form_definition=fd,
name='test',
label=get_random_string(),
field_class='django.forms.CharField',
)
page = api.create_page("test", "page.html", "en")
ph = page.get_placeholders()[0]
api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd)
request = rf.get("/")
request.user = AnonymousUser()
request.current_page = page
response = render_page(request, page, "fi", "test")
response.render()
content = response.content.decode("utf8")
assert field.label in content
assert "<form" in content
|
Revert "Disable Django-CMS test on Django 1.10+"
|
Revert "Disable Django-CMS test on Django 1.10+"
Django CMS tests should work now with Django 1.10 and 1.11 too, since
the Django CMS version 3.4.5 supports them.
This reverts commit fcfe2513fc8532dc2212a254da42d75048e76de7.
|
Python
|
bsd-3-clause
|
kcsry/django-form-designer,andersinno/django-form-designer-ai,kcsry/django-form-designer,andersinno/django-form-designer,andersinno/django-form-designer-ai,andersinno/django-form-designer
|
---
+++
@@ -1,4 +1,3 @@
-import django
from django.contrib.auth.models import AnonymousUser
from django.utils.crypto import get_random_string
@@ -11,8 +10,6 @@
@pytest.mark.django_db
def test_cms_plugin_renders_in_cms_page(rf):
- if django.VERSION >= (1, 10):
- pytest.xfail('This test is broken in Django 1.10+')
fd = FormDefinition.objects.create(
mail_to='test@example.com',
mail_subject='Someone sent you a greeting: {{ test }}'
|
e1da1e7e8fedd288e9ac3a41b529c5bfe3e0612b
|
orchestrator/__init__.py
|
orchestrator/__init__.py
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.10'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.4.0'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
Prepare for next development version
|
Prepare for next development version
|
Python
|
mit
|
totem/cluster-orchestrator,totem/cluster-orchestrator,totem/cluster-orchestrator
|
---
+++
@@ -2,7 +2,7 @@
from celery.signals import setup_logging
import orchestrator.logger
-__version__ = '0.3.10'
+__version__ = '0.4.0'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
|
39a1212508c27a5c21f8b027fef3fb409a28657f
|
app/commands.py
|
app/commands.py
|
from flask import current_app
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.command('list-routes')(list_routes)
|
import click
from flask import current_app
from flask.cli import with_appcontext
@click.command('list-routes')
@with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.add_command(list_routes)
|
Switch existing command to standard approach
|
Switch existing command to standard approach
This is the suggested approach in the documentation [1] and using
it makes it clearer what's going on and to add other commands with
arguments, which we'll do in the next commit.
[1]: https://flask.palletsprojects.com/en/2.0.x/cli/#custom-commands
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
---
+++
@@ -1,6 +1,10 @@
+import click
from flask import current_app
+from flask.cli import with_appcontext
+@click.command('list-routes')
+@with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
@@ -8,4 +12,4 @@
def setup_commands(application):
- application.cli.command('list-routes')(list_routes)
+ application.cli.add_command(list_routes)
|
79a2f5ddc1d6d05dca0d44ee6586b2d8809ee1c0
|
deep_q_rl/ale_run.py
|
deep_q_rl/ale_run.py
|
"""This script launches all of the processes necessary to train a
deep Q-network on an ALE game.
Usage:
ale_run.py [--glue_port GLUE_PORT]
All unrecognized command line arguments will be passed on to
rl_glue_ale_agent.py
"""
import subprocess
import sys
import os
import argparse
ROM_PATH = "/home/spragunr/neural_rl_libraries/roms/breakout.bin"
# Check for glue_port command line argument and set it up...
parser = argparse.ArgumentParser(description='Neural rl agent.')
parser.add_argument('--glue_port', type=str, default="4096",
help='rlglue port (default 4096)')
args, unknown = parser.parse_known_args()
my_env = os.environ.copy()
my_env["RLGLUE_PORT"] = args.glue_port
# Start the necessary processes:
p1 = subprocess.Popen(['rl_glue'], env=my_env)
ale_string = ("ale -game_controller rlglue -frame_skip 4 "
"-restricted_action_set true ")
p2 = subprocess.Popen(ale_string + ROM_PATH, shell=True, env=my_env)
p3 = subprocess.Popen(['./rl_glue_ale_experiment.py'], env=my_env)
p4 = subprocess.Popen(['./rl_glue_ale_agent.py'] + sys.argv[1:], env=my_env)
p1.wait()
p2.wait()
p3.wait()
p4.wait()
|
"""This script launches all of the processes necessary to train a
deep Q-network on an ALE game.
Usage:
ale_run.py [--glue_port GLUE_PORT]
All unrecognized command line arguments will be passed on to
rl_glue_ale_agent.py
"""
import subprocess
import sys
import os
import argparse
# Put your binaries under the directory 'deep_q_rl/roms'
ROM_PATH = "../roms/breakout.bin"
# Check for glue_port command line argument and set it up...
parser = argparse.ArgumentParser(description='Neural rl agent.')
parser.add_argument('--glue_port', type=str, default="4096",
help='rlglue port (default 4096)')
args, unknown = parser.parse_known_args()
my_env = os.environ.copy()
my_env["RLGLUE_PORT"] = args.glue_port
# Start the necessary processes:
p1 = subprocess.Popen(['rl_glue'], env=my_env)
ale_string = ("ale -game_controller rlglue -frame_skip 4 "
"-restricted_action_set true ")
p2 = subprocess.Popen(ale_string + ROM_PATH, shell=True, env=my_env)
p3 = subprocess.Popen(['./rl_glue_ale_experiment.py'], env=my_env)
p4 = subprocess.Popen(['./rl_glue_ale_agent.py'] + sys.argv[1:], env=my_env)
p1.wait()
p2.wait()
p3.wait()
p4.wait()
|
Make the position of the roms work for everybody.
|
Make the position of the roms work for everybody.
Here you previously used a directory that mentions your particular setting. Substitute it by a relative directory so it works for everybody. Also, suggest at the read file to create a 'rome' directory with 'breakout.bin' in it.
|
Python
|
bsd-3-clause
|
r0k3/deep_q_rl,aaannndddyyy/deep_q_rl,davidsj/deep_q_rl,spragunr/deep_q_rl,gogobebe2/deep_q_rl,aaannndddyyy/deep_q_rl,omnivert/deep_q_rl,npow/deep_q_rl,vvw/deep_q_rl,codeaudit/deep_q_rl,alito/deep_q_rl,jleni/deep_q_rl,peterzcc/deep_q_rl,davidsj/deep_q_rl,spragunr/deep_q_rl,codeaudit/deep_q_rl,udibr/deep_q_rl,udibr/deep_q_rl,sygi/deep_q_rl,r0k3/deep_q_rl,tmylk/deep_q_rl,jcatw/deep_q_rl,gogobebe2/deep_q_rl,vvw/deep_q_rl,jleni/deep_q_rl,tmylk/deep_q_rl,peterzcc/deep_q_rl,jcatw/deep_q_rl,alito/deep_q_rl,sygi/deep_q_rl,omnivert/deep_q_rl
|
---
+++
@@ -13,7 +13,8 @@
import os
import argparse
-ROM_PATH = "/home/spragunr/neural_rl_libraries/roms/breakout.bin"
+# Put your binaries under the directory 'deep_q_rl/roms'
+ROM_PATH = "../roms/breakout.bin"
# Check for glue_port command line argument and set it up...
parser = argparse.ArgumentParser(description='Neural rl agent.')
|
8b16084a4fe72a369ada80969f6b728abf611d8f
|
marathon_itests/environment.py
|
marathon_itests/environment.py
|
import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
print_container_logs('zookeeper')
print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
|
import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if scenario.status != 'passed':
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
|
Print container logs only in case of failed scenario
|
Print container logs only in case of failed scenario
|
Python
|
apache-2.0
|
somic/paasta,gstarnberger/paasta,Yelp/paasta,gstarnberger/paasta,somic/paasta,Yelp/paasta
|
---
+++
@@ -10,8 +10,11 @@
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
- print_container_logs('zookeeper')
- print_container_logs('marathon')
+ if scenario.status != 'passed':
+ print "Zookeeper container logs:"
+ print_container_logs('zookeeper')
+ print "Marathon container logs:"
+ print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
|
692dd2d6563f75ff1a421c81037f7654cababafa
|
tests/pages/internet_health.py
|
tests/pages/internet_health.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from pages.base import BasePage
from pages.regions.download_button import DownloadButton
class InternetHealthPage(BasePage):
URL_TEMPLATE = '/{locale}/internet-health/'
_download_button_locator = (By.ID, 'global-nav-download-firefox')
@property
def download_button(self):
el = self.find_element(*self._download_button_locator)
return DownloadButton(self, root=el)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from pages.base import BasePage
from pages.regions.download_button import DownloadButton
class InternetHealthPage(BasePage):
URL_TEMPLATE = '/{locale}/internet-health/'
_download_button_locator = (By.ID, 'protocol-nav-download-firefox')
@property
def download_button(self):
el = self.find_element(*self._download_button_locator)
return DownloadButton(self, root=el)
|
Fix failing integration test on /internet-health/
|
Fix failing integration test on /internet-health/
|
Python
|
mpl-2.0
|
mozilla/bedrock,alexgibson/bedrock,craigcook/bedrock,MichaelKohler/bedrock,sylvestre/bedrock,MichaelKohler/bedrock,pascalchevrel/bedrock,craigcook/bedrock,alexgibson/bedrock,ericawright/bedrock,pascalchevrel/bedrock,sgarrity/bedrock,mozilla/bedrock,alexgibson/bedrock,MichaelKohler/bedrock,hoosteeno/bedrock,flodolo/bedrock,sgarrity/bedrock,kyoshino/bedrock,sylvestre/bedrock,pascalchevrel/bedrock,flodolo/bedrock,flodolo/bedrock,ericawright/bedrock,kyoshino/bedrock,sylvestre/bedrock,alexgibson/bedrock,hoosteeno/bedrock,sylvestre/bedrock,ericawright/bedrock,mozilla/bedrock,hoosteeno/bedrock,kyoshino/bedrock,flodolo/bedrock,kyoshino/bedrock,craigcook/bedrock,sgarrity/bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,craigcook/bedrock,sgarrity/bedrock,mozilla/bedrock,ericawright/bedrock,hoosteeno/bedrock
|
---
+++
@@ -12,7 +12,7 @@
URL_TEMPLATE = '/{locale}/internet-health/'
- _download_button_locator = (By.ID, 'global-nav-download-firefox')
+ _download_button_locator = (By.ID, 'protocol-nav-download-firefox')
@property
def download_button(self):
|
413c0b7f2df43543fd360bca1a9a6b9de4f6f5e8
|
integration-tests/features/steps/user_intent.py
|
integration-tests/features/steps/user_intent.py
|
"""Basic checks for the server API."""
from behave import then, when
from urllib.parse import urljoin
import requests
from src.authorization_tokens import authorization
from src.parsing import parse_token_clause
def post_data_to_user_intent_endpoint(context, payload=None):
"""Post data into the REST API endpoint for user-intent."""
url = "/api/v1/user-intent"
if payload is not None:
context.response = requests.post(context.coreapi_url + url,
headers=authorization(context),
data=payload)
else:
context.response = requests.post(context.coreapi_url + url,
headers=authorization(context))
@when('I call user-intent endpoint without any payload')
def check_user_intent_without_payload(context):
"""Post no payload into the REST API endpoint for user-intent."""
post_data_to_user_intent_endpoint(context)
@when('I call user-intent endpoint with empty payload')
def check_user_intent_with_empty_payload(context):
"""Post empty into the REST API endpoint for user-intent."""
payload = {}
post_data_to_user_intent_endpoint(context, payload)
|
"""Basic checks for the server API."""
from behave import then, when
from urllib.parse import urljoin
import requests
from src.authorization_tokens import authorization
from src.parsing import parse_token_clause
def post_data_to_user_intent_endpoint(context, payload=None):
"""Post data into the REST API endpoint for user-intent."""
url = "/api/v1/user-intent"
if payload is not None:
context.response = requests.post(context.coreapi_url + url,
headers=authorization(context),
json=payload)
else:
context.response = requests.post(context.coreapi_url + url,
headers=authorization(context))
@when('I call user-intent endpoint without any payload')
def check_user_intent_without_payload(context):
"""Post no payload into the REST API endpoint for user-intent."""
post_data_to_user_intent_endpoint(context)
@when('I call user-intent endpoint with empty payload')
def check_user_intent_with_empty_payload(context):
"""Post empty into the REST API endpoint for user-intent."""
payload = {}
post_data_to_user_intent_endpoint(context, payload)
|
Fix - send JSON to the API, not raw data
|
Fix - send JSON to the API, not raw data
|
Python
|
apache-2.0
|
tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common
|
---
+++
@@ -15,7 +15,7 @@
if payload is not None:
context.response = requests.post(context.coreapi_url + url,
headers=authorization(context),
- data=payload)
+ json=payload)
else:
context.response = requests.post(context.coreapi_url + url,
headers=authorization(context))
|
a8af7cd7918322615a342c2fd662f394866da55f
|
tests/sentry/metrics/test_datadog.py
|
tests/sentry/metrics/test_datadog.py
|
from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
|
from __future__ import absolute_import
from mock import patch
from datadog.util.hostname import get_hostname
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=get_hostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=get_hostname(),
)
|
Test DatadogMetricsBackend against datadog's get_hostname
|
Test DatadogMetricsBackend against datadog's get_hostname
This fixes tests in Travis since the hostname returned is different
|
Python
|
bsd-3-clause
|
pauloschilling/sentry,pauloschilling/sentry,pauloschilling/sentry
|
---
+++
@@ -1,8 +1,8 @@
from __future__ import absolute_import
-import socket
+from mock import patch
-from mock import patch
+from datadog.util.hostname import get_hostname
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
@@ -18,7 +18,7 @@
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
- host=socket.gethostname(),
+ host=get_hostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
@@ -28,5 +28,5 @@
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
- host=socket.gethostname(),
+ host=get_hostname(),
)
|
7c117c4555fdf30b3d98a453c7e28245dca0c9b9
|
tests/test_stack/test_stack.py
|
tests/test_stack/test_stack.py
|
import unittest
from aids.stack.stack import Stack
class StackTestCase(unittest.TestCase):
'''
Unit tests for the Stack data structure
'''
def setUp(self):
pass
def test_stack_initialization(self):
test_stack = Stack()
self.assertTrue(isinstance(test_stack, Stack))
def test_stack_is_empty(self):
test_stack = Stack()
self.assertTrue(test_stack.is_empty())
def tearDown(self):
pass
|
import unittest
from aids.stack.stack import Stack
class StackTestCase(unittest.TestCase):
'''
Unit tests for the Stack data structure
'''
def setUp(self):
self.test_stack = Stack()
def test_stack_initialization(self):
self.assertTrue(isinstance(self.test_stack, Stack))
def test_stack_is_empty(self):
self.assertTrue(self.test_stack.is_empty())
def test_stack_push(self):
self.test_stack.push(1)
self.assertEqual(len(self.test_stack), 1)
def test_stack_peek(self):
self.test_stack.push(1)
self.assertEqual(self.test_stack.peek(), 1)
def test_stack_pop(self):
self.test_stack.push(1)
self.assertEqual(self.test_stack.pop(), 1)
def test_stack_size(self):
self.test_stack.push(1)
self.assertEqual(self.test_stack.size(), 1)
def tearDown(self):
pass
|
Add unit tests for push, peek, pop and size
|
Add unit tests for push, peek, pop and size
|
Python
|
mit
|
ueg1990/aids
|
---
+++
@@ -9,16 +9,29 @@
'''
def setUp(self):
- pass
+ self.test_stack = Stack()
def test_stack_initialization(self):
- test_stack = Stack()
- self.assertTrue(isinstance(test_stack, Stack))
+ self.assertTrue(isinstance(self.test_stack, Stack))
def test_stack_is_empty(self):
- test_stack = Stack()
- self.assertTrue(test_stack.is_empty())
+ self.assertTrue(self.test_stack.is_empty())
+ def test_stack_push(self):
+ self.test_stack.push(1)
+ self.assertEqual(len(self.test_stack), 1)
+
+ def test_stack_peek(self):
+ self.test_stack.push(1)
+ self.assertEqual(self.test_stack.peek(), 1)
+
+ def test_stack_pop(self):
+ self.test_stack.push(1)
+ self.assertEqual(self.test_stack.pop(), 1)
+
+ def test_stack_size(self):
+ self.test_stack.push(1)
+ self.assertEqual(self.test_stack.size(), 1)
def tearDown(self):
pass
|
1ef1851e508295f6d4bf01289591f42c21656df7
|
test/on_yubikey/test_interfaces.py
|
test/on_yubikey/test_interfaces.py
|
import unittest
from .framework import DestructiveYubikeyTestCase, exactly_one_yubikey_present
from yubikit.core.otp import OtpConnection
from yubikit.core.fido import FidoConnection
from yubikit.core.smartcard import SmartCardConnection
from ykman.device import connect_to_device
from time import sleep
@unittest.skipIf(
not exactly_one_yubikey_present(), "Exactly one YubiKey must be present."
)
class TestInterfaces(DestructiveYubikeyTestCase):
def try_connection(self, conn_type):
for _ in range(8):
try:
conn = connect_to_device(None, [conn_type])[0]
conn.close()
return
except Exception:
sleep(0.5)
self.fail("Failed connecting to device over %s" % conn_type)
def test_switch_interfaces(self):
self.try_connection(FidoConnection)
self.try_connection(OtpConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
|
import unittest
from .framework import DestructiveYubikeyTestCase, exactly_one_yubikey_present
from yubikit.core.otp import OtpConnection
from yubikit.core.fido import FidoConnection
from yubikit.core.smartcard import SmartCardConnection
from ykman.base import YUBIKEY
from ykman.device import connect_to_device
from time import sleep
@unittest.skipIf(
not exactly_one_yubikey_present(), "Exactly one YubiKey must be present."
)
class TestInterfaces(DestructiveYubikeyTestCase):
def try_connection(self, conn_type):
if self.key_type == YUBIKEY.NEO and conn_type == SmartCardConnection:
sleep(3.5)
conn, dev, info = connect_to_device(None, [conn_type])
conn.close()
def setUp(self):
conn, dev, info = connect_to_device()
conn.close()
self.key_type = dev.pid.get_type()
def test_switch_interfaces(self):
self.try_connection(FidoConnection)
self.try_connection(OtpConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
self.try_connection(SmartCardConnection)
self.try_connection(FidoConnection)
|
Test handling of reclaim timeout.
|
Test handling of reclaim timeout.
|
Python
|
bsd-2-clause
|
Yubico/yubikey-manager,Yubico/yubikey-manager
|
---
+++
@@ -4,6 +4,7 @@
from yubikit.core.otp import OtpConnection
from yubikit.core.fido import FidoConnection
from yubikit.core.smartcard import SmartCardConnection
+from ykman.base import YUBIKEY
from ykman.device import connect_to_device
from time import sleep
@@ -13,14 +14,15 @@
)
class TestInterfaces(DestructiveYubikeyTestCase):
def try_connection(self, conn_type):
- for _ in range(8):
- try:
- conn = connect_to_device(None, [conn_type])[0]
- conn.close()
- return
- except Exception:
- sleep(0.5)
- self.fail("Failed connecting to device over %s" % conn_type)
+ if self.key_type == YUBIKEY.NEO and conn_type == SmartCardConnection:
+ sleep(3.5)
+ conn, dev, info = connect_to_device(None, [conn_type])
+ conn.close()
+
+ def setUp(self):
+ conn, dev, info = connect_to_device()
+ conn.close()
+ self.key_type = dev.pid.get_type()
def test_switch_interfaces(self):
self.try_connection(FidoConnection)
@@ -29,9 +31,4 @@
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
self.try_connection(SmartCardConnection)
- self.try_connection(OtpConnection)
self.try_connection(FidoConnection)
- self.try_connection(SmartCardConnection)
- self.try_connection(FidoConnection)
- self.try_connection(SmartCardConnection)
- self.try_connection(OtpConnection)
|
300cbd3ff4d0e5021892f7c9940635695cb017a3
|
integration-test/197-clip-buildings.py
|
integration-test/197-clip-buildings.py
|
from shapely.geometry import shape
# this is mid way along the High Line in NYC, which is a huge long
# "building". we should be clipping it to a buffer of 3x the tile
# dimensions.
# http://www.openstreetmap.org/way/37054313
with features_in_tile_layer(16, 19295, 24631, 'buildings') as buildings:
# max width and height in degress as 3x the size of the above tile
max_w = 0.0164794921875
max_h = 0.012484410579673977
# need to check that we at least saw the high line
saw_the_high_line = False
for building in buildings:
bounds = shape(building['geometry']).bounds
w = bounds[2] - bounds[0]
h = bounds[3] - bounds[1]
if building['properties']['id'] == 37054313:
saw_the_high_line = True
if w > max_w or h > max_h:
raise Exception("feature %r is %rx%r, larger than the allowed "
"%rx%r."
% (building['properties']['id'],
w, h, max_w, max_h))
if not saw_the_high_line:
raise Exception("Expected to see the High Line in this tile, "
"but didn't.")
|
from shapely.geometry import shape
# this is mid way along the High Line in NYC, which is a huge long
# "building". we should be clipping it to a buffer of 3x the tile
# dimensions.
# http://www.openstreetmap.org/relation/7141751
with features_in_tile_layer(16, 19295, 24631, 'buildings') as buildings:
# max width and height in degress as 3x the size of the above tile
max_w = 0.0164794921875
max_h = 0.012484410579673977
# need to check that we at least saw the high line
saw_the_high_line = False
for building in buildings:
bounds = shape(building['geometry']).bounds
w = bounds[2] - bounds[0]
h = bounds[3] - bounds[1]
if building['properties']['id'] == -7141751:
saw_the_high_line = True
if w > max_w or h > max_h:
raise Exception("feature %r is %rx%r, larger than the allowed "
"%rx%r."
% (building['properties']['id'],
w, h, max_w, max_h))
if not saw_the_high_line:
raise Exception("Expected to see the High Line in this tile, "
"but didn't.")
|
Update data for the High Line, NYC
|
Update data for the High Line, NYC
Looks like it was turned into a multipolygon relation in [this changeset](http://www.openstreetmap.org/changeset/47542769).
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
---
+++
@@ -3,7 +3,7 @@
# this is mid way along the High Line in NYC, which is a huge long
# "building". we should be clipping it to a buffer of 3x the tile
# dimensions.
-# http://www.openstreetmap.org/way/37054313
+# http://www.openstreetmap.org/relation/7141751
with features_in_tile_layer(16, 19295, 24631, 'buildings') as buildings:
# max width and height in degress as 3x the size of the above tile
max_w = 0.0164794921875
@@ -17,7 +17,7 @@
w = bounds[2] - bounds[0]
h = bounds[3] - bounds[1]
- if building['properties']['id'] == 37054313:
+ if building['properties']['id'] == -7141751:
saw_the_high_line = True
if w > max_w or h > max_h:
|
86446c6d1b0b8583562e0fccf1745e95ce7003c2
|
util/__init__.py
|
util/__init__.py
|
#!/usr/bin/env python
#
# dials.util.__init__.py
#
# Copyright (C) 2013 Diamond Light Source
#
# Author: James Parkhurst
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
|
#!/usr/bin/env python
#
# dials.util.__init__.py
#
# Copyright (C) 2013 Diamond Light Source
#
# Author: James Parkhurst
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
import logging
# Get the log and write the error to the log file
log = logging.getLogger(__name__)
log.error(e)
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
|
Print out errors to log.
|
Print out errors to log.
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
---
+++
@@ -32,6 +32,11 @@
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
+ import logging
+
+ # Get the log and write the error to the log file
+ log = logging.getLogger(__name__)
+ log.error(e)
# Get the username
try:
|
aa3134912af3e57362310eb486d0f4e1d8660d0c
|
grains/grains.py
|
grains/grains.py
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
import itertools
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
if num == 1:
return 1
else:
for k, v in board.iteritems():
if k == num:
total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
print (total_after(1))
print(on_square(1))
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
total = 0
for i in range(1, num+1):
total += on_square(i)
return total
|
Reformat total_after function + Remove itertools
|
Reformat total_after function + Remove itertools
|
Python
|
mit
|
amalshehu/exercism-python
|
---
+++
@@ -4,7 +4,6 @@
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
-import itertools
square = [x for x in range(1, 65)]
@@ -19,14 +18,7 @@
def total_after(num):
- if num == 1:
- return 1
- else:
- for k, v in board.iteritems():
- if k == num:
- total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
- return total_after
-
-print (board)
-print (total_after(1))
-print(on_square(1))
+ total = 0
+ for i in range(1, num+1):
+ total += on_square(i)
+ return total
|
5188861fb873ea301eb5ec386f1df65c0707e146
|
openstack/tests/functional/object/v1/test_container.py
|
openstack/tests/functional/object/v1/test_container.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.object_store.v1 import container
from openstack.tests.functional import base
class TestContainer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
@classmethod
def setUpClass(cls):
super(TestContainer, cls).setUpClass()
tainer = cls.conn.object_store.create_container(name=cls.NAME)
assert isinstance(tainer, container.Container)
cls.assertIs(cls.NAME, tainer.name)
@classmethod
def tearDownClass(cls):
pass
# TODO(thowe): uncomment this when bug/1451211 fixed
# tainer = cls.conn.object_store.delete_container(cls.NAME,
# ignore_missing=False)
# cls.assertIs(None, tainer)
def test_list(self):
names = [o.name for o in self.conn.object_store.containers()]
self.assertIn(self.NAME, names)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.object_store.v1 import container
from openstack.tests.functional import base
class TestContainer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
@classmethod
def setUpClass(cls):
super(TestContainer, cls).setUpClass()
tainer = cls.conn.object_store.create_container(name=cls.NAME)
assert isinstance(tainer, container.Container)
cls.assertIs(cls.NAME, tainer.name)
@classmethod
def tearDownClass(cls):
result = cls.conn.object_store.delete_container(cls.NAME,
ignore_missing=False)
cls.assertIs(None, result)
def test_list(self):
names = [o.name for o in self.conn.object_store.containers()]
self.assertIn(self.NAME, names)
def test_get_metadata(self):
tainer = self.conn.object_store.get_container_metadata(self.NAME)
self.assertEqual(0, tainer.object_count)
self.assertEqual(0, tainer.bytes_used)
self.assertEqual(self.NAME, tainer.name)
|
Add functional tests for container metadata and delete
|
Add functional tests for container metadata and delete
Change-Id: Id13073b37d19482ca6ff6e53e315aaa424c3f1b9
|
Python
|
apache-2.0
|
openstack/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,briancurtin/python-openstacksdk,dudymas/python-openstacksdk,briancurtin/python-openstacksdk,dudymas/python-openstacksdk,openstack/python-openstacksdk,stackforge/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,stackforge/python-openstacksdk
|
---
+++
@@ -29,12 +29,16 @@
@classmethod
def tearDownClass(cls):
- pass
- # TODO(thowe): uncomment this when bug/1451211 fixed
- # tainer = cls.conn.object_store.delete_container(cls.NAME,
- # ignore_missing=False)
- # cls.assertIs(None, tainer)
+ result = cls.conn.object_store.delete_container(cls.NAME,
+ ignore_missing=False)
+ cls.assertIs(None, result)
def test_list(self):
names = [o.name for o in self.conn.object_store.containers()]
self.assertIn(self.NAME, names)
+
+ def test_get_metadata(self):
+ tainer = self.conn.object_store.get_container_metadata(self.NAME)
+ self.assertEqual(0, tainer.object_count)
+ self.assertEqual(0, tainer.bytes_used)
+ self.assertEqual(self.NAME, tainer.name)
|
6b84688c1b5a7f2e8c9e5007455b88cbaa845e9f
|
tests/test_track_output/results.py
|
tests/test_track_output/results.py
|
#!/usr/bin/env python
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
import vtk
except ImportError:
print('----------------Skipping test-------------')
shutil.copy('results_true.dat', 'results_test.dat')
exit()
# Run track processing script
call(['../../track.py', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
|
#!/usr/bin/env python
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
import vtk
except ImportError:
print('----------------Skipping test-------------')
shutil.copy('results_true.dat', 'results_test.dat')
exit()
# Run track processing script
call(['../../scripts/openmc-track-to-vtk', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
|
Fix path to script in test_track_output
|
Fix path to script in test_track_output
|
Python
|
mit
|
mjlong/openmc,wbinventor/openmc,bhermanmit/openmc,wbinventor/openmc,johnnyliu27/openmc,paulromano/openmc,mjlong/openmc,smharper/openmc,lilulu/openmc,johnnyliu27/openmc,amandalund/openmc,samuelshaner/openmc,mit-crpg/openmc,shikhar413/openmc,johnnyliu27/openmc,mit-crpg/openmc,liangjg/openmc,kellyrowland/openmc,amandalund/openmc,walshjon/openmc,lilulu/openmc,liangjg/openmc,liangjg/openmc,johnnyliu27/openmc,samuelshaner/openmc,mit-crpg/openmc,amandalund/openmc,smharper/openmc,shikhar413/openmc,samuelshaner/openmc,mit-crpg/openmc,shikhar413/openmc,paulromano/openmc,liangjg/openmc,smharper/openmc,shikhar413/openmc,samuelshaner/openmc,paulromano/openmc,walshjon/openmc,walshjon/openmc,smharper/openmc,bhermanmit/openmc,wbinventor/openmc,wbinventor/openmc,walshjon/openmc,paulromano/openmc,lilulu/openmc,amandalund/openmc,kellyrowland/openmc
|
---
+++
@@ -17,7 +17,7 @@
exit()
# Run track processing script
-call(['../../track.py', '-o', 'poly'] +
+call(['../../scripts/openmc-track-to-vtk', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
|
85cb348dab356386362fe7657dee3e31aa1f92bf
|
rep.py
|
rep.py
|
"""
The top level of the APL Read-Evaluate-Print loop
UNDER DEVELOPMENT
This version adds a dummy evaluate routine.
"""
import sys
# ------------------------------
def evaluate(expression):
"""
Evaluate an APL expression - dummy version
"""
return (expression)
def read_evaluate_print (prompt):
"""
Read input, echo input
"""
try:
while True:
print(end=prompt)
line = input().lstrip()
if line:
if line[0] == ')':
if line[0:4].upper() == ')OFF':
apl_exit("Bye bye")
print('⎕', evaluate(line))
except EOFError:
apl_exit(None)
def apl_quit ():
"""
Quit without clean up
"""
print ()
sys.exit(0)
def apl_exit (message):
"""
Clean up and quit
"""
if message is None:
print ()
else:
print (message)
sys.exit(0)
# EOF
|
"""
The top level of the APL Read-Evaluate-Print loop
UNDER DEVELOPMENT
This version adds simple APL exception handling
"""
import sys
from apl_exception import APL_Exception as apl_exception
# ------------------------------
def evaluate(expression):
"""
Evaluate an APL expression - dummy version
"""
return (expression)
def read_evaluate_print (prompt):
"""
Read input, echo input
"""
try:
while True:
print(end=prompt)
line = input().lstrip()
if line:
if line[0] == ')':
if line[0:4].upper() == ')OFF':
apl_exit("Bye bye")
try:
result = evaluate(line)
except apl_exception as e:
print(' '*(len(prompt)+len(line)-len(e.line)),end="^\n")
result = e.message
finally:
print('⎕', result)
except EOFError:
apl_exit(None)
def apl_quit ():
"""
Quit without clean up
"""
print()
sys.exit(0)
def apl_exit (message):
"""
Clean up and quit
"""
if message is None:
print()
else:
print(message)
sys.exit(0)
# EOF
|
Add exception handling to the read-evaluate-loop
|
Add exception handling to the read-evaluate-loop
|
Python
|
apache-2.0
|
NewForester/apl-py,NewForester/apl-py
|
---
+++
@@ -3,10 +3,12 @@
UNDER DEVELOPMENT
- This version adds a dummy evaluate routine.
+ This version adds simple APL exception handling
"""
import sys
+
+from apl_exception import APL_Exception as apl_exception
# ------------------------------
@@ -28,7 +30,15 @@
if line[0] == ')':
if line[0:4].upper() == ')OFF':
apl_exit("Bye bye")
- print('⎕', evaluate(line))
+
+ try:
+ result = evaluate(line)
+ except apl_exception as e:
+ print(' '*(len(prompt)+len(line)-len(e.line)),end="^\n")
+ result = e.message
+ finally:
+ print('⎕', result)
+
except EOFError:
apl_exit(None)
@@ -36,7 +46,7 @@
"""
Quit without clean up
"""
- print ()
+ print()
sys.exit(0)
def apl_exit (message):
@@ -44,9 +54,9 @@
Clean up and quit
"""
if message is None:
- print ()
+ print()
else:
- print (message)
+ print(message)
sys.exit(0)
# EOF
|
574fba0650e4c68b7a31533207c26d8d57ed49c2
|
run.py
|
run.py
|
from sys import argv

from examples.connect4.connect4 import Connect4Network

if __name__ == '__main__':
    # Bug fix: guard the argv access -- running the script with no
    # arguments used to crash with IndexError instead of exiting quietly.
    if len(argv) > 1 and argv[1] == 'connect4':
        Connect4Network().play()
|
from sys import argv

from examples.connect4.connect4 import Connect4Network

if __name__ == '__main__':
    # Dispatch on the first CLI argument; the length check makes running
    # with no arguments a quiet no-op instead of an IndexError.
    if len(argv) > 1:
        if argv[1] == 'connect4':
            Connect4Network().play()
|
Check length of argv before accessing it
|
Check length of argv before accessing it
|
Python
|
mit
|
tysonzero/py-ann
|
---
+++
@@ -4,5 +4,6 @@
if __name__ == '__main__':
- if argv[1] == 'connect4':
- Connect4Network().play()
+ if len(argv) > 1:
+ if argv[1] == 'connect4':
+ Connect4Network().play()
|
a7867806a6bd3abfd6bf2bcac6c490965be000e2
|
tests/test_completeness.py
|
tests/test_completeness.py
|
import unittest as unittest
from syntax import Syntax
from jscodegen import CodeGenerator
def add_cases(generator):
def class_decorator(cls):
"""Add tests to `cls` generated by `generator()`."""
for f, token in generator():
test = lambda self, i=token, f=f: f(self, i)
test.__name__ = "test %s" % token.name
setattr(cls, test.__name__, test)
return cls
return class_decorator
def _test_tokens():
def t(self, to):
c = CodeGenerator({})
func_name = to.name.lower()
try:
getattr(c, func_name)
self.assertTrue(True, func_name)
except AttributeError:
self.fail("Not implemented: %s" % func_name)
for token in Syntax:
yield t, token
class TestCase(unittest.TestCase):
pass
TestCase = add_cases(_test_tokens)(TestCase)
if __name__=="__main__":
unittest.main()
|
import unittest as unittest
from jscodegen.syntax import Syntax
from jscodegen import CodeGenerator
def add_cases(generator):
    """Return a class decorator that attaches one test method to the class
    for every ``(func, token)`` pair yielded by ``generator()``.

    Each attached method is named ``"test <token.name>"`` and simply
    forwards ``(self, token)`` to the yielded function.
    """
    def class_decorator(cls):
        for func, token in generator():
            # Bind func/token as defaults to avoid the late-binding-closure trap.
            def method(self, _token=token, _func=func):
                return _func(self, _token)
            method.__name__ = "test %s" % token.name
            setattr(cls, method.__name__, method)
        return cls
    return class_decorator
def _test_tokens():
    """Yield one ``(check, token)`` pair per Syntax member.

    Each check fails its test when CodeGenerator has no handler method
    for the token.
    """
    def t(self, to):
        c = CodeGenerator({})
        # handler methods are named after the lower-cased token name
        func_name = to.name.lower()
        try:
            getattr(c, func_name)
            self.assertTrue(True, func_name)
        except AttributeError:
            self.fail("Not implemented: %s" % func_name)

    for token in Syntax:
        yield t, token
class TestCase(unittest.TestCase):
pass
TestCase = add_cases(_test_tokens)(TestCase)
if __name__=="__main__":
unittest.main()
|
Fix an issue in the tests
|
Fix an issue in the tests
|
Python
|
mit
|
ksons/jscodegen.py
|
---
+++
@@ -1,5 +1,5 @@
import unittest as unittest
-from syntax import Syntax
+from jscodegen.syntax import Syntax
from jscodegen import CodeGenerator
|
eb0a5e5768c2f699cbc1f6395d3c14320d8e730d
|
template/__init__.py
|
template/__init__.py
|
#!/usr/bin/env python
# pylint: disable=import-error
"""Generate files from Jinja2 templates and environment variables."""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
) # pylint: disable=duplicate-code
from os import environ
from sys import stdin, stdout
import argparse
from argparse import ArgumentParser
from jinja2 import Environment
import template.filters
__version__ = "0.6.4"
def render(template_string):
"""Render the template."""
env = Environment(autoescape=True)
# Add all functions in template.filters as Jinja filters.
# pylint: disable=invalid-name
for tf in filter(lambda x: not x.startswith("_"), dir(template.filters)):
env.filters[tf] = getattr(template.filters, tf)
t = env.from_string(template_string)
return t.render(environ)
def main():
"""Main entrypoint."""
parser = ArgumentParser(
description="""A CLI tool for generating files from Jinja2 templates
and environment variables."""
)
parser.add_argument(
"filename",
help="Input filename",
type=argparse.FileType("r"),
nargs="?",
)
parser.add_argument(
"-o",
"--output",
help="Output to filename",
type=argparse.FileType("w"),
)
args = parser.parse_args()
infd = args.filename if args.filename else stdin
outfd = args.output if args.output else stdout
print(render(infd.read()), file=outfd)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# pylint: disable=import-error
"""A CLI tool for generating files from Jinja2 templates and environment
variables."""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
) # pylint: disable=duplicate-code
from os import environ
from sys import stdin, stdout
import argparse
from argparse import ArgumentParser
from jinja2 import Environment
import template.filters
__version__ = "0.6.4"
def render(template_string):
    """Render *template_string* as a Jinja2 template against os.environ.

    Every public callable in :mod:`template.filters` is registered as a
    Jinja filter before rendering.
    """
    env = Environment(autoescape=True)
    # pylint: disable=invalid-name
    public_names = [name for name in dir(template.filters)
                    if not name.startswith("_")]
    for name in public_names:
        env.filters[name] = getattr(template.filters, name)
    return env.from_string(template_string).render(environ)
def main():
    """Main entrypoint: render a template file (or stdin) to a file (or stdout)."""
    # Reuse the module docstring as the --help description.
    parser = ArgumentParser(description=__doc__)
    parser.add_argument(
        "filename",
        help="Input filename",
        type=argparse.FileType("r"),
        nargs="?",  # optional: falls back to stdin below
    )
    parser.add_argument(
        "-o",
        "--output",
        help="Output to filename",
        type=argparse.FileType("w"),
    )
    args = parser.parse_args()
    # Default to the standard streams when no file arguments were given.
    infd = args.filename if args.filename else stdin
    outfd = args.output if args.output else stdout
    print(render(infd.read()), file=outfd)
if __name__ == "__main__":
main()
|
Use the docstring as the CLI description.
|
Use the docstring as the CLI description.
|
Python
|
agpl-3.0
|
adarnimrod/template,adarnimrod/template
|
---
+++
@@ -1,6 +1,7 @@
#!/usr/bin/env python
# pylint: disable=import-error
-"""Generate files from Jinja2 templates and environment variables."""
+"""A CLI tool for generating files from Jinja2 templates and environment
+variables."""
from __future__ import (
absolute_import,
@@ -32,10 +33,7 @@
def main():
"""Main entrypoint."""
- parser = ArgumentParser(
- description="""A CLI tool for generating files from Jinja2 templates
- and environment variables."""
- )
+ parser = ArgumentParser(description=__doc__)
parser.add_argument(
"filename",
help="Input filename",
|
3aa7dd8d1247fe9d426049c8b09db0a5b1af9067
|
prediction/test_tuner.py
|
prediction/test_tuner.py
|
from .tuner import Hyperband
import unittest
class HyperbandTestCase(unittest.TestCase):
    """Unit tests for the Hyperband tuner's resource-allocation schedule."""

    def test_run(self):
        """Record every callback invocation and compare against the
        expected configuration counts (n), resources (r) and
        candidate counts (c)."""
        observed_ns = []
        observed_rs = []
        observed_cs = []
        def _get(n):
            # how many configurations the tuner requested
            observed_ns.append(n)
            return list(range(n))
        def _test(r, c):
            # resource budget and surviving-candidate count per round
            observed_rs.append(int(r))
            observed_cs.append(len(c))
            return [r * c for c in c]
        tuner = Hyperband()
        tuner.run(_get, _test)
        expected_ns = [81, 34, 15, 8, 5]
        expected_rs = [1, 3, 9, 27, 81, 3, 9, 27, 81, 9, 27, 81, 27, 81, 81]
        expected_cs = [81, 27, 9, 3, 1, 27, 9, 3, 1, 9, 3, 1, 6, 2, 5]
        self.assertEqual(expected_ns, observed_ns)
        self.assertEqual(expected_rs, observed_rs)
        # Bug fix: this assertion previously re-checked the rs lists a
        # second time, leaving observed_cs completely unverified.
        self.assertEqual(expected_cs, observed_cs)
|
from .tuner import Hyperband
import unittest
class HyperbandTestCase(unittest.TestCase):
    """Tests for the Hyperband tuner's bracket schedule."""

    def test_run(self):
        # Record every call the tuner makes to the user-supplied
        # "get configurations" and "evaluate" callbacks.
        observed_ns = []
        observed_rs = []
        observed_cs = []
        def _get(n):
            observed_ns.append(n)
            return list(range(n))
        def _test(r, c):
            observed_rs.append(int(r))
            observed_cs.append(len(c))
            return [r * c for c in c]
        tuner = Hyperband()
        tuner.run(_get, _test)
        # Expected schedule; presumably max resource 81 with eta 3 --
        # TODO confirm against Hyperband's defaults.
        expected_ns = [81, 34, 15, 8, 5]
        expected_rs = [1, 3, 9, 27, 81, 3, 9, 27, 81, 9, 27, 81, 27, 81, 81]
        expected_cs = [81, 27, 9, 3, 1, 27, 9, 3, 1, 9, 3, 1, 6, 2, 5]
        self.assertEqual(expected_ns, observed_ns)
        self.assertEqual(expected_rs, observed_rs)
        self.assertEqual(expected_cs, observed_cs)
|
Fix a typo in Hyperband’s test
|
Fix a typo in Hyperband’s test
|
Python
|
mit
|
learning-on-chip/google-cluster-prediction
|
---
+++
@@ -21,4 +21,4 @@
expected_cs = [81, 27, 9, 3, 1, 27, 9, 3, 1, 9, 3, 1, 6, 2, 5]
self.assertEqual(expected_ns, observed_ns)
self.assertEqual(expected_rs, observed_rs)
- self.assertEqual(expected_rs, observed_rs)
+ self.assertEqual(expected_cs, observed_cs)
|
14ef0cc78c327c67a2acdb68915d427e4babdd4b
|
run_tests.py
|
run_tests.py
|
from os import getcwd
from sys import path as ppath
ppath.insert(1,getcwd()+'/modules') # TODO: win32 compatibility (python path)

import sys
import unittest

from lifegame.test.LifeGameTestSuite import LifeGameTestSuite
from tests.TestSuite import TestSuite

# TODO: list the tests elsewhere? Auto-collect module test suites
# (reminder: config.config was used before, but it caused a circular import)
# Collect every test case from the registered suites into one runnable suite.
runnable = unittest.TestSuite()
tests_suites = [TestSuite(), LifeGameTestSuite()]
for testsuite in tests_suites:
    for test_case in testsuite.get_test_cases():
        runnable.addTest(unittest.makeSuite(test_case))
runner = unittest.TextTestRunner()
# Bug fix: the run result used to be discarded, so the process exited 0
# even when tests failed; propagate success/failure to the exit status.
result = runner.run(runnable)
sys.exit(0 if result.wasSuccessful() else 1)
|
from os import getcwd
from sys import path as ppath
ppath.insert(1,getcwd()+'/modules') # TODO: win32 compatibility (python path)

import sys
import unittest

from lifegame.test.LifeGameTestSuite import LifeGameTestSuite
from tests.TestSuite import TestSuite

# TODO: list the tests elsewhere? Auto-collect module test suites
# (reminder: config.config was used before, but it caused a circular import)
# Collect every test case from the registered suites into one runnable suite.
runnable = unittest.TestSuite()
tests_suites = [TestSuite(), LifeGameTestSuite()]
for testsuite in tests_suites:
    for test_case in testsuite.get_test_cases():
        runnable.addTest(unittest.makeSuite(test_case))
runner = unittest.TextTestRunner()
# Bug fix: exit(runner.run(runnable)) passed a TestResult object to exit(),
# and any non-integer argument makes the interpreter exit with status 1 --
# so even a fully successful run reported failure.  Derive a proper status.
result = runner.run(runnable)
sys.exit(0 if result.wasSuccessful() else 1)
|
Test running: exit with run test status
|
Test running: exit with run test status
|
Python
|
apache-2.0
|
buxx/synergine
|
---
+++
@@ -17,4 +17,5 @@
runnable.addTest(unittest.makeSuite(test_case))
runner=unittest.TextTestRunner()
-runner.run(runnable)
+exit(runner.run(runnable))
+
|
ae593fd5de74a123b5064ef2e018b4955dc9e6c9
|
runserver.py
|
runserver.py
|
#!/usr/local/bin/python2.7
import sys
from datetime import date
from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day > 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
print "Cron succeeded"
#freezer = Freezer(app)
#freezer.freeze()
#set_metadata()
else:
app.run()
|
#!/usr/local/bin/python2.7
import sys
from datetime import date

from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer

# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day < 8
# `python runserver.py freeze` forces a rebuild regardless of the date
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'

if len(sys.argv) > 1:  # if runserver is passed an argument
    if cron_condition or force_update:
        # Freeze the Flask app to static files, then update S3 metadata.
        freezer = Freezer(app)
        freezer.freeze()
        set_metadata()
else:
    # No arguments: run the development server instead of freezing.
    app.run()
|
Complete testing, change back to production code
|
Complete testing, change back to production code
|
Python
|
apache-2.0
|
vprnet/dorothys-list,vprnet/dorothys-list,vprnet/dorothys-list
|
---
+++
@@ -8,14 +8,13 @@
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
-cron_condition = len(sys.argv) == 3 and date.today().day > 8
+cron_condition = len(sys.argv) == 3 and date.today().day < 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
- print "Cron succeeded"
- #freezer = Freezer(app)
- #freezer.freeze()
- #set_metadata()
+ freezer = Freezer(app)
+ freezer.freeze()
+ set_metadata()
else:
app.run()
|
3cef3e4774923b81e622f03aee44a933293c6a8d
|
modelview/migrations/0036_auto_20170322_1622.py
|
modelview/migrations/0036_auto_20170322_1622.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-22 15:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0035_auto_20160426_1721'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='source_of_funding',
field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-22 15:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter BasicFactsheet.source_of_funding to a nullable CharField."""

    dependencies = [
        # Old dependency kept for reference; the 0035 migration was
        # regenerated under a new timestamp.
        #('modelview', '0035_auto_20160426_1721'),
        ('modelview', '0035_auto_20170724_1801'),
    ]

    operations = [
        migrations.AlterField(
            model_name='basicfactsheet',
            name='source_of_funding',
            field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'),
        ),
    ]
|
Change reference from modelview/0035_auto_20160426 to 0035_auto_20170724
|
Change reference from modelview/0035_auto_20160426 to 0035_auto_20170724
|
Python
|
agpl-3.0
|
openego/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,openego/oeplatform
|
---
+++
@@ -8,7 +8,8 @@
class Migration(migrations.Migration):
dependencies = [
- ('modelview', '0035_auto_20160426_1721'),
+ #('modelview', '0035_auto_20160426_1721'),
+ ('modelview', '0035_auto_20170724_1801'),
]
operations = [
|
c50d9efe011417162ab6e99e19973dcde6b3ddfa
|
utils/get_collection_object_count.py
|
utils/get_collection_object_count.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(
description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument(
'--pynuxrc',
default='~/.pynuxrc-prod',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
action='store_true',
help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
if not argv.components:
return
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
    """Print object (and optionally component) counts for a Nuxeo collection."""
    parser = argparse.ArgumentParser(
        description='Print count of objects for a given collection.')
    parser.add_argument('path', help="Nuxeo path to collection")
    parser.add_argument(
        '--pynuxrc',
        default='~/.pynuxrc',
        help="rcfile for use with pynux utils")
    parser.add_argument(
        '--components',
        action='store_true',
        help="show counts for object components")
    if argv is None:
        argv = parser.parse_args()

    dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)

    print "about to fetch objects for path {}".format(dh.path)
    objects = dh.fetch_objects()
    object_count = len(objects)
    print "finished fetching objects. {} found".format(object_count)

    # Count distinct uids as well, to expose duplicates in the fetch results.
    uid_set = set()
    for obj in objects:
        uid_set.add(obj['uid'])

    unique = len(uid_set)
    print "unique uid count: {}".format(unique)

    if not argv.components:
        return

    # Component fetches hit the server once per object, so this can be slow.
    print "about to iterate through objects and get components"
    component_count = 0
    for obj in objects:
        components = dh.fetch_components(obj)
        component_count = component_count + len(components)
    print "finished fetching components. {} found".format(component_count)

    print "Grand Total: {}".format(object_count + component_count)


if __name__ == "__main__":
    sys.exit(main())
|
Change default arg value for pynuxrc
|
Change default arg value for pynuxrc
|
Python
|
bsd-3-clause
|
barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere
|
---
+++
@@ -13,7 +13,7 @@
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument(
'--pynuxrc',
- default='~/.pynuxrc-prod',
+ default='~/.pynuxrc',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
@@ -27,6 +27,13 @@
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
+
+ uid_set = set()
+ for obj in objects:
+ uid_set.add(obj['uid'])
+
+ unique = len(uid_set)
+ print "unique uid count: {}".format(unique)
if not argv.components:
return
|
dad05c2eff78383e3179fd1f4a4502fe553afecd
|
tests/lib/docker_compose_tools.py
|
tests/lib/docker_compose_tools.py
|
# pylint: disable=line-too-long
""" Handles docker compose """
from lib.docker_tools import docker_compose
from lib.tools import show_status
def set_up(pg_version, es_version):
""" Start containers """
compose = docker_compose(pg_version, es_version)
show_status(
"Starting testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
pg_version=pg_version, es_version=es_version
)
)
show_status("Stopping and Removing any old containers...")
compose("stop")
compose("rm", "--force")
show_status("Building new images...")
compose("build")
show_status("Starting new containers...")
compose("up", "-d")
show_status("Testing environment started")
def tear_down(pg_version, es_version):
""" Stop containers """
compose = docker_compose(pg_version, es_version)
show_status(
"Stopping testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
pg_version=pg_version, es_version=es_version
)
)
compose("down")
show_status("Testing environment stopped")
|
# pylint: disable=line-too-long
""" Handles docker compose """
import sys
from sh import ErrorReturnCode
from lib.docker_tools import docker_compose
from lib.tools import show_status
def set_up(pg_version, es_version):
    """Build and start the docker-compose test environment for the given
    PostgreSQL and Elasticsearch versions."""
    compose = docker_compose(pg_version, es_version)
    show_status(
        "Starting testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
            pg_version=pg_version, es_version=es_version
        )
    )

    # Ensure a clean slate before rebuilding.
    show_status("Stopping and Removing any old containers...")
    compose("stop")
    compose("rm", "--force")

    show_status("Building new images...")
    try:
        compose("build")
    except ErrorReturnCode as exc:
        # Print the full captured stdout/stderr so the build failure is
        # diagnosable, then fail the whole run.
        print("Failed to build images...")
        print(exc.stdout.decode("utf-8"))
        print()
        print(exc.stderr.decode("utf-8"))
        sys.exit(1)

    show_status("Starting new containers...")
    compose("up", "-d")
    show_status("Testing environment started")
def tear_down(pg_version, es_version):
    """Stop and remove the docker-compose test environment for the given
    PostgreSQL and Elasticsearch versions."""
    compose = docker_compose(pg_version, es_version)
    show_status(
        "Stopping testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
            pg_version=pg_version, es_version=es_version
        )
    )
    # `down` stops and removes the containers created by set_up.
    compose("down")
    show_status("Testing environment stopped")
|
Print full stdout / stderr when image build fails
|
Print full stdout / stderr when image build fails
|
Python
|
mit
|
matthewfranglen/postgres-elasticsearch-fdw
|
---
+++
@@ -1,5 +1,9 @@
# pylint: disable=line-too-long
""" Handles docker compose """
+
+import sys
+
+from sh import ErrorReturnCode
from lib.docker_tools import docker_compose
from lib.tools import show_status
@@ -21,7 +25,14 @@
compose("rm", "--force")
show_status("Building new images...")
- compose("build")
+ try:
+ compose("build")
+ except ErrorReturnCode as exc:
+ print("Failed to build images...")
+ print(exc.stdout.decode("utf-8"))
+ print()
+ print(exc.stderr.decode("utf-8"))
+ sys.exit(1)
show_status("Starting new containers...")
compose("up", "-d")
|
482bcbaeddf3d18c445b780a083d723a85a6dd88
|
wok/renderers.py
|
wok/renderers.py
|
from markdown import markdown
import docutils.core
from docutils.writers.html4css1 import Writer as rst_html_writer
class Renderer(object):
extensions = []
@classmethod
def render(cls, plain):
return plain
class Markdown(Renderer):
extensions = ['markdown', 'mkd']
@classmethod
def render(cls, plain):
return markdown(plain, ['def_list', 'footnotes'])
class ReStructuredText(Renderer):
extensions = ['rst']
@classmethod
def render(cls, plain):
w = rst_html_writer()
return docutils.core.publish_parts(plain, writer=w)['body']
class Plain(Renderer):
extensions = 'txt'
@classmethod
def render(cls, plain):
return plain.replace('\n', '<br>')
all = [Renderer, Plain, Markdown, ReStructuredText]
|
from markdown import markdown
import docutils.core
from docutils.writers.html4css1 import Writer as rst_html_writer
class Renderer(object):
    """Base renderer: claims no file extensions and passes content through."""

    # file extensions (without the dot) this renderer handles
    extensions = []

    @classmethod
    def render(cls, plain):
        """Return *plain* unchanged."""
        return plain
class Markdown(Renderer):
    """Render Markdown source to HTML."""
    extensions = ['markdown', 'mkd']

    @classmethod
    def render(cls, plain):
        # def_list: definition lists, footnotes: footnote syntax,
        # codehilite: syntax highlighting of fenced/indented code blocks
        return markdown(plain, ['def_list', 'footnotes', 'codehilite'])
class ReStructuredText(Renderer):
    """Render reStructuredText source to HTML via docutils."""
    extensions = ['rst']

    @classmethod
    def render(cls, plain):
        w = rst_html_writer()
        # publish_parts returns a dict of document fragments; only the
        # body (without the surrounding html/head boilerplate) is needed.
        return docutils.core.publish_parts(plain, writer=w)['body']
class Plain(Renderer):
    """Render plain text: every newline becomes an HTML line break."""

    # NOTE(review): a bare string, unlike the lists used by the sibling
    # renderers -- confirm whether 'txt' is meant to be ['txt']
    extensions = 'txt'

    @classmethod
    def render(cls, plain):
        """Replace each newline in *plain* with a ``<br>`` tag."""
        return plain.replace('\n', '<br>')
# Registry of the available renderers.
# NOTE(review): the name `all` shadows the builtin -- consider renaming upstream.
all = [Renderer, Plain, Markdown, ReStructuredText]
|
Enable syntax highlighting with Markdown.
|
Enable syntax highlighting with Markdown.
|
Python
|
mit
|
wummel/wok,mythmon/wok,ngokevin/wok,matt-garman/wok,Avaren/wok,algor512/wok,edunham/wok,ngokevin/wok,chrplace/wok,moreati/wok,mythmon/wok,algor512/wok,vaygr/wok,vaygr/wok,edunham/wok,algor512/wok,moreati/wok,abbgrade/wok,edunham/wok,gchriz/wok,abbgrade/wok,matt-garman/wok,chrplace/wok,chrplace/wok,matt-garman/wok,jneves/wok,wummel/wok,moreati/wok,jneves/wok,gchriz/wok,Avaren/wok,vaygr/wok,mythmon/wok,gchriz/wok,jneves/wok,Avaren/wok
|
---
+++
@@ -14,7 +14,7 @@
@classmethod
def render(cls, plain):
- return markdown(plain, ['def_list', 'footnotes'])
+ return markdown(plain, ['def_list', 'footnotes', 'codehilite'])
class ReStructuredText(Renderer):
extensions = ['rst']
|
7fd5db30d8ce8b6ecc8fdb87f695f484e5e9f38f
|
jacquard/wsgi.py
|
jacquard/wsgi.py
|
"""
WSGI application target.
This module is designed for use when running the Jacquard server from a WSGI
web server such as waitress or gunicorn. `jacquard.wsgi` would be the module
to target, picking up the WSGI application from `app`.
In this case, the configuration file can be specified through the environment
variable `JACQUARD_CONFIG`; if left unspecified, the file 'config.cfg' in the
current working directory is assumed.
"""
from jacquard.config import load_config
from jacquard.service import get_wsgi_app
from .cli import DEFAULT_CONFIG_FILE_PATH
app = get_wsgi_app(load_config(DEFAULT_CONFIG_FILE_PATH))
|
"""
WSGI application target.
This module is designed for use when running the Jacquard server from a WSGI
web server such as waitress or gunicorn. `jacquard.wsgi` would be the module
to target, picking up the WSGI application from `app`.
In this case, the configuration file can be specified through the environment
variable `JACQUARD_CONFIG`; if left unspecified, the file 'config.cfg' in the
current working directory is assumed.
"""
import os
import logging

from jacquard.config import load_config
from jacquard.service import get_wsgi_app
# Moved up from mid-module: imports belong at the top of the file, grouped
# with the other jacquard imports (behavior is unchanged).
from jacquard.utils import check_keys

from .cli import DEFAULT_CONFIG_FILE_PATH

# Log level comes from the environment so WSGI servers can tune verbosity
# without code changes; unset means errors only.
LOG_LEVEL = os.environ.get('JACQUARD_LOG_LEVEL', 'errors').lower()
KNOWN_LOG_LEVELS = {
    'debug': logging.DEBUG,
    'info': logging.INFO,
    'errors': logging.ERROR,
}

# Fail fast at import time on an unknown JACQUARD_LOG_LEVEL value.
check_keys((LOG_LEVEL,), KNOWN_LOG_LEVELS, RuntimeError)

logging.basicConfig(level=KNOWN_LOG_LEVELS[LOG_LEVEL])

# The WSGI callable picked up by servers such as waitress or gunicorn.
app = get_wsgi_app(load_config(DEFAULT_CONFIG_FILE_PATH))
|
Add environment variable for configuring logging
|
Add environment variable for configuring logging
|
Python
|
mit
|
prophile/jacquard,prophile/jacquard
|
---
+++
@@ -10,9 +10,25 @@
current working directory is assumed.
"""
+import os
+import logging
+
from jacquard.config import load_config
from jacquard.service import get_wsgi_app
from .cli import DEFAULT_CONFIG_FILE_PATH
+LOG_LEVEL = os.environ.get('JACQUARD_LOG_LEVEL', 'errors').lower()
+KNOWN_LOG_LEVELS = {
+ 'debug': logging.DEBUG,
+ 'info': logging.INFO,
+ 'errors': logging.ERROR,
+}
+
+from jacquard.utils import check_keys
+
+check_keys((LOG_LEVEL,), KNOWN_LOG_LEVELS, RuntimeError)
+
+logging.basicConfig(level=KNOWN_LOG_LEVELS[LOG_LEVEL])
+
app = get_wsgi_app(load_config(DEFAULT_CONFIG_FILE_PATH))
|
eef8498388c672b25344a3f6fd8c05166e4ed4f6
|
xea_core/urls.py
|
xea_core/urls.py
|
"""xea_core URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'api/auth/jwt/', include('jwt_knox.urls')),
url(r'^api/', include('api.urls')),
]
|
"""xea_core URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
    # Django admin site
    url(r'^admin/', admin.site.urls),
    # JWT auth endpoints; namespaced so views can reverse() them.
    # NOTE(review): this pattern lacks the leading '^' anchor that its
    # siblings have -- confirm whether that is intended.
    url(r'api/auth/jwt/', include('jwt_knox.urls', namespace='jwt_knox')),
    # application API
    url(r'^api/', include('api.urls')),
]
|
Add namespace to jwt_knox URLs
|
Add namespace to jwt_knox URLs
|
Python
|
agpl-3.0
|
gpul-org/xea-core
|
---
+++
@@ -19,7 +19,7 @@
urlpatterns = [
url(r'^admin/', admin.site.urls),
- url(r'api/auth/jwt/', include('jwt_knox.urls')),
+ url(r'api/auth/jwt/', include('jwt_knox.urls', namespace='jwt_knox')),
url(r'^api/', include('api.urls')),
]
|
6cce924d849f209fb8251acd4c21d65ad0daa24c
|
plugins/basic_info_plugin.py
|
plugins/basic_info_plugin.py
|
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
short_description = 'List some basic info about the string in a table'
header = 'Basic info:'
default = True
description = textwrap.dedent('''\
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes''')
key = '--basic'
def handle(self):
table = VeryPrettyTable()
table.field_names = ['String', 'Length', '# Digits', '# Alpha', '# Punct.', '# Control']
for s in self.args['STRING']:
table.add_row((s, len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s)))
return str(table) + '\n'
|
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
from plugins.util import green, red
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
    """Tabulate simple per-string statistics: length, character-class
    counts, and whether the string consists solely of hex digits."""
    short_description = 'List some basic info about the string in a table'
    header = 'Basic info:'
    # runs by default, without needing its --basic flag
    default = True
    description = textwrap.dedent('''\
    This plugin provides some basic info about the string such as:
    - Length
    - Presence of alpha/digits/raw bytes''')
    key = '--basic'

    def handle(self):
        """Return a rendered table with one row per input string."""
        table = VeryPrettyTable()
        table.field_names = ['String', 'Length', '# Digits', '# Alpha', '# Punct.', '# Control', 'Hex?']
        for s in self.args['STRING']:
            # '# Control' counts characters outside string.printable;
            # 'Hex?' shows a green check when every character is a hex digit.
            table.add_row((s, len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
                           sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s),
                           green('✔') if all(x in string.hexdigits for x in s) else red('✗')))
        return str(table) + '\n'
|
Add hex check to basic info
|
Add hex check to basic info
|
Python
|
mit
|
Sakartu/stringinfo
|
---
+++
@@ -4,6 +4,7 @@
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
+from plugins.util import green, red
__author__ = 'peter'
@@ -21,9 +22,10 @@
def handle(self):
table = VeryPrettyTable()
- table.field_names = ['String', 'Length', '# Digits', '# Alpha', '# Punct.', '# Control']
+ table.field_names = ['String', 'Length', '# Digits', '# Alpha', '# Punct.', '# Control', 'Hex?']
for s in self.args['STRING']:
table.add_row((s, len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
- sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s)))
+ sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s),
+ green('✔') if all(x in string.hexdigits for x in s) else red('✗')))
return str(table) + '\n'
|
1e82bf0e23775dfc2541dc8c2dec4447c20f2cda
|
tests/submitsaved.py
|
tests/submitsaved.py
|
#!/usr/bin/env python2
import pickle, sys
sys.path.append('../')
from lewas.parsers import UnitParser, AttrParser, field_rangler
import lewas.datastores
import lewas.models
config = "../config"
config = lewas.readConfig(config)
lewas.datastores.submitRequest(pickle.load(open(sys.argv[1])), config, False)
|
#!/usr/bin/env python2
# Resubmit one or more pickled, previously-saved requests to the datastore.
import pickle, sys
sys.path.append('../')

from lewas.parsers import UnitParser, AttrParser, field_rangler
import lewas.datastores
import lewas.models

config = "../config"
config = lewas.readConfig(config)

# Each command-line argument names a pickle file containing a saved request.
# NOTE(review): pickle.load can execute arbitrary code while loading --
# only feed this script trusted files.
for fn in sys.argv[1:]:
    lewas.datastores.submitRequest(pickle.load(open(fn)), config, False)
    print "processed", fn
|
Add capability to resubmit multiple files
|
Add capability to resubmit multiple files
|
Python
|
mit
|
LEWASatVT/lewas
|
---
+++
@@ -8,4 +8,6 @@
config = "../config"
config = lewas.readConfig(config)
-lewas.datastores.submitRequest(pickle.load(open(sys.argv[1])), config, False)
+for fn in sys.argv[1:]:
+ lewas.datastores.submitRequest(pickle.load(open(fn)), config, False)
+ print "processed", fn
|
d8a2eb66521f49c535d54cfa8a66965bc0fc8caf
|
tests/test_client.py
|
tests/test_client.py
|
from __future__ import unicode_literals
import pytest
from bugbuzz import BugBuzzClient
@pytest.fixture
def bugbuzz_client(base_url='http://localhost'):
return BugBuzzClient(base_url)
def test_random_access_key():
keys = set()
for _ in range(100):
client = bugbuzz_client()
keys.add(client.aes_key)
assert len(keys) == 100
|
from __future__ import unicode_literals
import pytest
from Crypto.Cipher import AES
from bugbuzz import BugBuzzClient
from bugbuzz import pkcs5_unpad
@pytest.fixture
def bugbuzz_client(base_url='http://localhost'):
    """Return a fresh BugBuzzClient pointed at a dummy base URL."""
    return BugBuzzClient(base_url)


def test_random_access_key():
    """Each client must generate its own random AES key."""
    keys = set()
    for _ in range(100):
        client = bugbuzz_client()
        keys.add(client.aes_key)
    # 100 distinct keys from 100 clients => no key reuse
    assert len(keys) == 100


def test_encrypt_decrypt(bugbuzz_client):
    """Encrypting then decrypting (AES-CBC + PKCS#5 unpad) round-trips."""
    plaintext = b'super foobar'
    iv, encrypted = bugbuzz_client.encrypt(plaintext)
    assert encrypted != plaintext

    # Decrypt with the client's own key and returned iv to verify.
    aes = AES.new(bugbuzz_client.aes_key, AES.MODE_CBC, iv)
    assert pkcs5_unpad(aes.decrypt(encrypted)) == plaintext
|
Add test for encrypt and decrypt
|
Add test for encrypt and decrypt
|
Python
|
mit
|
victorlin/bugbuzz-python,victorlin/bugbuzz-python
|
---
+++
@@ -1,8 +1,10 @@
from __future__ import unicode_literals
import pytest
+from Crypto.Cipher import AES
from bugbuzz import BugBuzzClient
+from bugbuzz import pkcs5_unpad
@pytest.fixture
@@ -16,3 +18,12 @@
client = bugbuzz_client()
keys.add(client.aes_key)
assert len(keys) == 100
+
+
+def test_encrypt_decrypt(bugbuzz_client):
+ plaintext = b'super foobar'
+ iv, encrypted = bugbuzz_client.encrypt(plaintext)
+ assert encrypted != plaintext
+
+ aes = AES.new(bugbuzz_client.aes_key, AES.MODE_CBC, iv)
+ assert pkcs5_unpad(aes.decrypt(encrypted)) == plaintext
|
5d44e71da2835ff9cdeb9fa21d0fcd8645c4365a
|
fjord/urls.py
|
fjord/urls.py
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
("User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS
else 'Disallow'),
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
from funfactory.monkeypatches import patch
patch()
from django.contrib import admin
from adminplus import AdminSitePlus
admin.site = AdminSitePlus()
admin.autodiscover()
admin.site.login = login_required(admin.site.login)
urlpatterns = patterns('',
(r'', include('fjord.analytics.urls')),
(r'', include('fjord.base.urls')),
(r'', include('fjord.feedback.urls')),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
("User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS
else 'Disallow'),
mimetype="text/plain"
)
),
(r'^browserid/', include('django_browserid.urls')),
(r'^admin/', include(admin.site.urls)),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
|
Revert "Nix helpful admin login requirement"
|
Revert "Nix helpful admin login requirement"
This reverts commit 684dc38622d6cbe70879fb900ce5d73146a0cb40.
We can put it back in because we're going to stick with LDAP basic
auth.
|
Python
|
bsd-3-clause
|
lgp171188/fjord,rlr/fjord,DESHRAJ/fjord,hoosteeno/fjord,lgp171188/fjord,staranjeet/fjord,lgp171188/fjord,mozilla/fjord,Ritsyy/fjord,DESHRAJ/fjord,mozilla/fjord,mozilla/fjord,hoosteeno/fjord,rlr/fjord,mozilla/fjord,hoosteeno/fjord,rlr/fjord,staranjeet/fjord,Ritsyy/fjord,DESHRAJ/fjord,Ritsyy/fjord,hoosteeno/fjord,staranjeet/fjord,staranjeet/fjord,lgp171188/fjord,Ritsyy/fjord,rlr/fjord
|
---
+++
@@ -1,5 +1,6 @@
from django.conf import settings
from django.conf.urls.defaults import patterns, include
+from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse
@@ -11,6 +12,7 @@
admin.site = AdminSitePlus()
admin.autodiscover()
+admin.site.login = login_required(admin.site.login)
urlpatterns = patterns('',
|
904644ba540fbf6dcd47fb5a006d03a64d299fb2
|
src/yoi/pages.py
|
src/yoi/pages.py
|
from flask import request
from flaskext.genshi import render_response
from yoi.app import app
@app.route('/')
def get_index():
return render_response('index.html')
@app.route('/home')
def get_home():
return render_response('home.html')
@app.route('/journal')
def get_journal():
return render_response('journal.html')
@app.route('/new-entry')
def get_new_entry():
return render_response('new-entry.html')
|
from flask import request
from flaskext.genshi import render_response
from yoi.app import app
@app.route('/')
def index():
return render_response('index.html')
@app.route('/home')
def home():
return render_response('home.html')
@app.route('/journal')
def journal():
return render_response('journal.html')
@app.route('/new-entry')
def new_entry():
return render_response('new-entry.html')
|
Remove `get_` prefix on page functions.
|
Remove `get_` prefix on page functions.
|
Python
|
mit
|
doptio/you-owe-it,doptio/you-owe-it,doptio/you-owe-it,doptio/you-owe-it
|
---
+++
@@ -4,17 +4,17 @@
from yoi.app import app
@app.route('/')
-def get_index():
+def index():
return render_response('index.html')
@app.route('/home')
-def get_home():
+def home():
return render_response('home.html')
@app.route('/journal')
-def get_journal():
+def journal():
return render_response('journal.html')
@app.route('/new-entry')
-def get_new_entry():
+def new_entry():
return render_response('new-entry.html')
|
8365945ef62b8f9cd37022302e8ee6299716720d
|
masterfirefoxos/base/helpers.py
|
masterfirefoxos/base/helpers.py
|
from feincms.templatetags.feincms_tags import feincms_render_region
from jingo import register
from jinja2 import Markup
@register.function
def render_region(feincms_page, region, request):
return Markup(feincms_render_region(None, feincms_page, region, request))
|
from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper
from feincms.templatetags.feincms_tags import feincms_render_region
from jingo import register
from jinja2 import Markup
static = register.function(static_helper)
@register.function
def render_region(feincms_page, region, request):
return Markup(feincms_render_region(None, feincms_page, region, request))
|
Add static helper for jinja2
|
Add static helper for jinja2
|
Python
|
mpl-2.0
|
craigcook/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos,enng0227/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,enng0227/masterfirefoxos,mozilla/masterfirefoxos,glogiotatidis/masterfirefoxos,glogiotatidis/masterfirefoxos,liu21st/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,craigcook/masterfirefoxos,liu21st/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos
|
---
+++
@@ -1,8 +1,12 @@
+from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper
from feincms.templatetags.feincms_tags import feincms_render_region
from jingo import register
from jinja2 import Markup
+static = register.function(static_helper)
+
+
@register.function
def render_region(feincms_page, region, request):
return Markup(feincms_render_region(None, feincms_page, region, request))
|
538a48b0ec6589d4d8fcf1c4253898a0fd3367e3
|
hello.py
|
hello.py
|
# Based on "Flask is Fun" demo from Flask documentation.
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.config.update(
SERVER_NAME='127.0.0.1:%d' % int(os.environ.get('PORT', 5000)),
)
app.run()
|
# Based on "Flask is Fun" demo from Flask documentation.
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.config.update(
SERVER_NAME='127.0.0.1:%d' % int(os.environ.get('PORT', 5000)),
)
app.run(host='0.0.0.0')
|
Add external access for demo process
|
Add external access for demo process
Avoid using Lynx web browser on a Raspberry pi for reaching localhost on console :-)
|
Python
|
mit
|
smartmob-project/strawboss-demo,smartmob-project/strawboss-demo
|
---
+++
@@ -13,4 +13,4 @@
app.config.update(
SERVER_NAME='127.0.0.1:%d' % int(os.environ.get('PORT', 5000)),
)
- app.run()
+ app.run(host='0.0.0.0')
|
1598c699dc6bdf5d6edd700b70e11df207412dcd
|
hackernews.py
|
hackernews.py
|
import requests
class HackerNews():
def __init__(self):
self.url = 'https://hacker-news.firebaseio.com/v0/{uri}'
def request(self, method, uri):
url = self.url.format(uri=uri)
return requests.request(method, url)
def item(self, item_id):
r = self.request('GET', 'item/{item_id}.json'.format(item_id=item_id))
return r.json()
def user(self, user_id):
r = self.request('GET', 'user/{user_id}.json'.format(user_id=user_id))
return r.json()
def top_stories(self):
r = self.request('GET', 'topstories.json')
return r.json()
def max_item(self):
r = self.request('GET', 'maxitem.json')
return r.json()
def updates(self):
r = self.request('GET', 'updates.json')
return r.json()
|
from datetime import datetime
import requests
class HackerNews():
def __init__(self, timeout=5):
self.url = 'https://hacker-news.firebaseio.com/v0/{uri}'
self.timeout = timeout
def request(self, method, uri):
url = self.url.format(uri=uri)
return requests.request(method, url, timeout=self.timeout)
def item(self, item_id):
r = self.request('GET', 'item/{item_id}.json'.format(item_id=item_id))
item = r.json()
item['time'] = datetime.fromtimestamp(item['time'])
return item
def user(self, user_id):
r = self.request('GET', 'user/{user_id}.json'.format(user_id=user_id))
user = r.json()
user['created'] = datetime.fromtimestamp(user['created'])
return user
def top_stories(self):
r = self.request('GET', 'topstories.json')
return r.json()
def max_item(self):
r = self.request('GET', 'maxitem.json')
return r.json()
def updates(self):
r = self.request('GET', 'updates.json')
return r.json()
|
Convert timestamps to native datetime objects (breaking change)
|
Convert timestamps to native datetime objects (breaking change)
|
Python
|
mit
|
abrinsmead/hackernews-python
|
---
+++
@@ -1,22 +1,29 @@
+from datetime import datetime
+
import requests
class HackerNews():
- def __init__(self):
+ def __init__(self, timeout=5):
self.url = 'https://hacker-news.firebaseio.com/v0/{uri}'
+ self.timeout = timeout
def request(self, method, uri):
url = self.url.format(uri=uri)
- return requests.request(method, url)
+ return requests.request(method, url, timeout=self.timeout)
def item(self, item_id):
r = self.request('GET', 'item/{item_id}.json'.format(item_id=item_id))
- return r.json()
+ item = r.json()
+ item['time'] = datetime.fromtimestamp(item['time'])
+ return item
def user(self, user_id):
r = self.request('GET', 'user/{user_id}.json'.format(user_id=user_id))
- return r.json()
+ user = r.json()
+ user['created'] = datetime.fromtimestamp(user['created'])
+ return user
def top_stories(self):
r = self.request('GET', 'topstories.json')
@@ -29,4 +36,3 @@
def updates(self):
r = self.request('GET', 'updates.json')
return r.json()
-
|
8b7a7e09a53d23b37266c67995756cf1dadf520d
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.1.2"
class CMakeIncludeGuardConan(ConanFile):
name = "cmake-include-guard"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
requires = ("cmake-module-common/master@smspillaz/cmake-module-common", )
generators = "cmake"
url = "http://github.com/polysquare/cmake-include-guard"
licence = "MIT"
def source(self):
zip_name = "cmake-include-guard.zip"
download("https://github.com/polysquare/"
"cmake-include-guard/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/cmake-include-guard",
src="cmake-include-guard-" + VERSION,
keep_path=True)
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.1.2"
class CMakeIncludeGuardConan(ConanFile):
name = "cmake-include-guard"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
url = "http://github.com/polysquare/cmake-include-guard"
licence = "MIT"
options = {
"dev": [True, False]
}
default_options = "dev=False"
def requirements(self):
if self.options.dev:
self.requires("cmake-module-common/master@smspillaz/cmake-module-common")
def source(self):
zip_name = "cmake-include-guard.zip"
download("https://github.com/polysquare/"
"cmake-include-guard/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/cmake-include-guard",
src="cmake-include-guard-" + VERSION,
keep_path=True)
|
Make cmake-module-common a dev-only requirement
|
conan: Make cmake-module-common a dev-only requirement
|
Python
|
mit
|
polysquare/cmake-include-guard
|
---
+++
@@ -8,10 +8,17 @@
class CMakeIncludeGuardConan(ConanFile):
name = "cmake-include-guard"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
- requires = ("cmake-module-common/master@smspillaz/cmake-module-common", )
generators = "cmake"
url = "http://github.com/polysquare/cmake-include-guard"
licence = "MIT"
+ options = {
+ "dev": [True, False]
+ }
+ default_options = "dev=False"
+
+ def requirements(self):
+ if self.options.dev:
+ self.requires("cmake-module-common/master@smspillaz/cmake-module-common")
def source(self):
zip_name = "cmake-include-guard.zip"
|
70686bc62b85d36894b6e7909ceaa25d122ffe95
|
users/serializers.py
|
users/serializers.py
|
from rest_framework import serializers
from django.contrib.auth.models import User, Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = ('name',)
class UserSerializer(serializers.ModelSerializer):
groups_complete = GroupSerializer(source='groups', read_only=True, many=True)
class Meta:
model = User
fields = ('id', 'username', 'password', 'first_name',
'last_name', 'groups', 'groups_complete')
extra_kwargs = {
'username': {'required': True},
'password': {'required': True}
}
|
from rest_framework import serializers
from django.contrib.auth.models import User, Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = ('name',)
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'password', 'first_name',
'last_name', 'groups')
extra_kwargs = {
'username': {'required': True},
'password': {'required': False}
}
|
Remove nested serializer for groups, make password optional in serializer for user edition
|
Remove nested serializer for groups, make password optional in serializer for user edition
|
Python
|
mit
|
fernandolobato/balarco,fernandolobato/balarco,fernandolobato/balarco
|
---
+++
@@ -9,13 +9,11 @@
class UserSerializer(serializers.ModelSerializer):
- groups_complete = GroupSerializer(source='groups', read_only=True, many=True)
-
class Meta:
model = User
fields = ('id', 'username', 'password', 'first_name',
- 'last_name', 'groups', 'groups_complete')
+ 'last_name', 'groups')
extra_kwargs = {
'username': {'required': True},
- 'password': {'required': True}
+ 'password': {'required': False}
}
|
7d266a2e0f9d636272f60636d79703166b6377d2
|
tests/test_exceptions.py
|
tests/test_exceptions.py
|
# Tests for SecretStorage
# Author: Dmitry Shachnev, 2013
# License: BSD
# Various exception tests
import unittest
import secretstorage
from secretstorage.exceptions import ItemNotFoundException
class ExceptionsTest(unittest.TestCase):
"""A test case that ensures that all SecretStorage exceptions
are raised correctly."""
@classmethod
def setUpClass(cls):
cls.bus = secretstorage.dbus_init(main_loop=False)
cls.collection = secretstorage.Collection(cls.bus)
def test_double_deleting(self):
item = self.collection.create_item('MyItem',
{'application': 'secretstorage-test'}, b'pa$$word')
item.delete()
self.assertRaises(ItemNotFoundException, item.delete)
def test_non_existing_item(self):
self.assertRaises(ItemNotFoundException, secretstorage.Item,
self.bus, '/not/existing/path')
def test_non_existing_collection(self):
self.assertRaises(ItemNotFoundException,
secretstorage.get_collection_by_alias,
self.bus, 'non-existing-alias')
if __name__ == '__main__':
unittest.main()
|
# Tests for SecretStorage
# Author: Dmitry Shachnev, 2013
# License: BSD
# Various exception tests
import unittest
import secretstorage
from secretstorage.exceptions import ItemNotFoundException
class ExceptionsTest(unittest.TestCase):
"""A test case that ensures that all SecretStorage exceptions
are raised correctly."""
@classmethod
def setUpClass(cls):
cls.bus = secretstorage.dbus_init(main_loop=False)
cls.collection = secretstorage.Collection(cls.bus)
def test_double_deleting(self):
item = self.collection.create_item('MyItem',
{'application': 'secretstorage-test'}, b'pa$$word')
item.delete()
self.assertRaises(ItemNotFoundException, item.delete)
def test_non_existing_item(self):
self.assertRaises(ItemNotFoundException, secretstorage.Item,
self.bus, '/not/existing/path')
def test_non_existing_collection(self):
self.assertRaises(ItemNotFoundException,
secretstorage.get_collection_by_alias,
self.bus, 'non-existing-alias')
if __name__ == '__main__':
unittest.main()
|
Remove unwanted whitespace in tests
|
Remove unwanted whitespace in tests
|
Python
|
bsd-3-clause
|
mitya57/secretstorage
|
---
+++
@@ -26,7 +26,7 @@
def test_non_existing_item(self):
self.assertRaises(ItemNotFoundException, secretstorage.Item,
self.bus, '/not/existing/path')
-
+
def test_non_existing_collection(self):
self.assertRaises(ItemNotFoundException,
secretstorage.get_collection_by_alias,
|
c86ccf75fdf49115697548bb046bc83f348a1aba
|
tests/test_exceptions.py
|
tests/test_exceptions.py
|
from framewirc import exceptions
class MissingAttributesTest:
def test_message(self):
attrs = ['some', 'attrs']
expected = 'Required attribute(s) missing: {}'.format(attrs)
exception = exceptions.MissingAttributes(attrs)
assert str(exception) == expected
|
from framewirc import exceptions
def test_message():
attrs = ['some', 'attrs']
expected = 'Required attribute(s) missing: {}'.format(attrs)
exception = exceptions.MissingAttributes(attrs)
assert str(exception) == expected
|
Fix test that wasn't running
|
Fix test that wasn't running
|
Python
|
bsd-2-clause
|
meshy/framewirc
|
---
+++
@@ -1,11 +1,10 @@
from framewirc import exceptions
-class MissingAttributesTest:
- def test_message(self):
- attrs = ['some', 'attrs']
- expected = 'Required attribute(s) missing: {}'.format(attrs)
+def test_message():
+ attrs = ['some', 'attrs']
+ expected = 'Required attribute(s) missing: {}'.format(attrs)
- exception = exceptions.MissingAttributes(attrs)
+ exception = exceptions.MissingAttributes(attrs)
- assert str(exception) == expected
+ assert str(exception) == expected
|
4b926ab5de00bc4885021fff0b2ac3679703707a
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
Drop FK before dropping instance_id column.
|
Drop FK before dropping instance_id column.
|
Python
|
apache-2.0
|
klmitch/nova,hanlind/nova,aristanetworks/arista-ovs-nova,fajoy/nova,dawnpower/nova,gooddata/openstack-nova,joker946/nova,shail2810/nova,orbitfp7/nova,jeffrey4l/nova,zaina/nova,tianweizhang/nova,ruslanloman/nova,mgagne/nova,yosshy/nova,cloudbase/nova,paulmathews/nova,JioCloud/nova_test_latest,savi-dev/nova,russellb/nova,nikesh-mahalka/nova,BeyondTheClouds/nova,scripnichenko/nova,leilihh/nova,gooddata/openstack-nova,vmturbo/nova,JianyuWang/nova,badock/nova,tudorvio/nova,CloudServer/nova,TieWei/nova,sileht/deb-openstack-nova,MountainWei/nova,fnordahl/nova,mmnelemane/nova,psiwczak/openstack,bclau/nova,Stavitsky/nova,NeCTAR-RC/nova,viggates/nova,jianghuaw/nova,aristanetworks/arista-ovs-nova,CiscoSystems/nova,yosshy/nova,cloudbase/nova-virtualbox,affo/nova,dstroppa/openstack-smartos-nova-grizzly,akash1808/nova,yrobla/nova,ruslanloman/nova,openstack/nova,vladikr/nova_drafts,maoy/zknova,bigswitch/nova,apporc/nova,barnsnake351/nova,dims/nova,salv-orlando/MyRepo,spring-week-topos/nova-week,devendermishrajio/nova_test_latest,edulramirez/nova,saleemjaveds/https-github.com-openstack-nova,Triv90/Nova,dims/nova,paulmathews/nova,houshengbo/nova_vmware_compute_driver,angdraug/nova,vmturbo/nova,watonyweng/nova,usc-isi/nova,gooddata/openstack-nova,sridevikoushik31/openstack,NoBodyCam/TftpPxeBootBareMetal,usc-isi/extra-specs,SUSE-Cloud/nova,mikalstill/nova,belmiromoreira/nova,Triv90/Nova,vmturbo/nova,cyx1231st/nova,petrutlucian94/nova,shahar-stratoscale/nova,rahulunair/nova,MountainWei/nova,gspilio/nova,berrange/nova,gspilio/nova,felixma/nova,nikesh-mahalka/nova,redhat-openstack/nova,badock/nova,noironetworks/nova,joker946/nova,alexandrucoman/vbox-nova-driver,shahar-stratoscale/nova,isyippee/nova,yrobla/nova,salv-orlando/MyRepo,petrutlucian94/nova_dev,rajalokan/nova,maelnor/nova,russellb/nova,isyippee/nova,rahulunair/nova,houshengbo/nova_vmware_compute_driver,tealover/nova,whitepages/nova,alvarolopez/nova,TwinkleChawla/nova,maoy/zknova,mahak/nova,usc-isi/nova,cyx1231st/nova,iuliat/nova
,tianweizhang/nova,eayunstack/nova,ewindisch/nova,JioCloud/nova,DirectXMan12/nova-hacking,virtualopensystems/nova,BeyondTheClouds/nova,leilihh/nova,petrutlucian94/nova_dev,rajalokan/nova,luogangyi/bcec-nova,tudorvio/nova,raildo/nova,shootstar/novatest,fajoy/nova,sridevikoushik31/openstack,maelnor/nova,tanglei528/nova,tanglei528/nova,ewindisch/nova,j-carpentier/nova,josephsuh/extra-specs,Brocade-OpenSource/OpenStack-DNRM-Nova,eneabio/nova,rrader/nova-docker-plugin,LoHChina/nova,mahak/nova,TwinkleChawla/nova,Juniper/nova,imsplitbit/nova,mikalstill/nova,scripnichenko/nova,akash1808/nova_test_latest,OpenAcademy-OpenStack/nova-scheduler,CiscoSystems/nova,rajalokan/nova,bgxavier/nova,NoBodyCam/TftpPxeBootBareMetal,saleemjaveds/https-github.com-openstack-nova,alexandrucoman/vbox-nova-driver,noironetworks/nova,NewpTone/stacklab-nova,zzicewind/nova,tealover/nova,phenoxim/nova,yrobla/nova,Francis-Liu/animated-broccoli,devoid/nova,mgagne/nova,DirectXMan12/nova-hacking,Metaswitch/calico-nova,sridevikoushik31/nova,KarimAllah/nova,berrange/nova,qwefi/nova,fnordahl/nova,blueboxgroup/nova,CCI-MOC/nova,kimjaejoong/nova,klmitch/nova,CCI-MOC/nova,spring-week-topos/nova-week,edulramirez/nova,akash1808/nova_test_latest,angdraug/nova,josephsuh/extra-specs,Yuriy-Leonov/nova,affo/nova,double12gzh/nova,iuliat/nova,projectcalico/calico-nova,vmturbo/nova,redhat-openstack/nova,Yuriy-Leonov/nova,josephsuh/extra-specs,gspilio/nova,savi-dev/nova,Triv90/Nova,mandeepdhami/nova,rahulunair/nova,devendermishrajio/nova,zhimin711/nova,savi-dev/nova,DirectXMan12/nova-hacking,mandeepdhami/nova,Juniper/nova,dstroppa/openstack-smartos-nova-grizzly,double12gzh/nova,Juniper/nova,projectcalico/calico-nova,takeshineshiro/nova,blueboxgroup/nova,devendermishrajio/nova_test_latest,adelina-t/nova,JianyuWang/nova,eonpatapon/nova,j-carpentier/nova,dawnpower/nova,bigswitch/nova,tangfeixiong/nova,SUSE-Cloud/nova,silenceli/nova,devendermishrajio/nova,sebrandon1/nova,plumgrid/plumgrid-nova,CEG-FYP-OpenStack/scheduler,Met
aswitch/calico-nova,Yusuke1987/openstack_template,JioCloud/nova_test_latest,sridevikoushik31/openstack,citrix-openstack-build/nova,ntt-sic/nova,ted-gould/nova,sebrandon1/nova,klmitch/nova,fajoy/nova,salv-orlando/MyRepo,sacharya/nova,Tehsmash/nova,NeCTAR-RC/nova,mikalstill/nova,phenoxim/nova,Stavitsky/nova,dstroppa/openstack-smartos-nova-grizzly,eneabio/nova,shail2810/nova,alvarolopez/nova,eneabio/nova,tangfeixiong/nova,akash1808/nova,usc-isi/extra-specs,hanlind/nova,klmitch/nova,rickerc/nova_audit,bgxavier/nova,usc-isi/extra-specs,usc-isi/nova,psiwczak/openstack,varunarya10/nova_test_latest,thomasem/nova,viggates/nova,JioCloud/nova,yatinkumbhare/openstack-nova,bclau/nova,barnsnake351/nova,takeshineshiro/nova,alaski/nova,openstack/nova,Francis-Liu/animated-broccoli,alaski/nova,leilihh/novaha,ted-gould/nova,felixma/nova,leilihh/novaha,CEG-FYP-OpenStack/scheduler,ntt-sic/nova,jianghuaw/nova,paulmathews/nova,CloudServer/nova,sridevikoushik31/nova,rrader/nova-docker-plugin,varunarya10/nova_test_latest,LoHChina/nova,russellb/nova,maheshp/novatest,qwefi/nova,silenceli/nova,eonpatapon/nova,sacharya/nova,thomasem/nova,psiwczak/openstack,NoBodyCam/TftpPxeBootBareMetal,rickerc/nova_audit,KarimAllah/nova,sileht/deb-openstack-nova,eharney/nova,shootstar/novatest,cloudbase/nova-virtualbox,rajalokan/nova,Tehsmash/nova,virtualopensystems/nova,cloudbau/nova,houshengbo/nova_vmware_compute_driver,maheshp/novatest,Brocade-OpenSource/OpenStack-DNRM-Nova,kimjaejoong/nova,eayunstack/nova,gooddata/openstack-nova,maheshp/novatest,cernops/nova,raildo/nova,sridevikoushik31/nova,mahak/nova,Juniper/nova,cloudbau/nova,maoy/zknova,sridevikoushik31/nova,jianghuaw/nova,luogangyi/bcec-nova,sebrandon1/nova,plumgrid/plumgrid-nova,jeffrey4l/nova,imsplitbit/nova,NewpTone/stacklab-nova,Yusuke1987/openstack_template,eharney/nova,orbitfp7/nova,zzicewind/nova,openstack/nova,jianghuaw/nova,apporc/nova,zhimin711/nova,vladikr/nova_drafts,cloudbase/nova,BeyondTheClouds/nova,cernops/nova,watonyweng/nova,mmnelema
ne/nova,OpenAcademy-OpenStack/nova-scheduler,aristanetworks/arista-ovs-nova,adelina-t/nova,citrix-openstack-build/nova,sileht/deb-openstack-nova,cloudbase/nova,NewpTone/stacklab-nova,zaina/nova,KarimAllah/nova,petrutlucian94/nova,yatinkumbhare/openstack-nova,TieWei/nova,whitepages/nova,hanlind/nova,devoid/nova,belmiromoreira/nova,cernops/nova
|
---
+++
@@ -17,6 +17,7 @@
from sqlalchemy import Column, Integer, String, MetaData, Table
+
meta = MetaData()
@@ -33,6 +34,11 @@
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
+
+ if migrate_engine.name == "mysql":
+ migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
+ "`migrations_ibfk_1`;")
+
migrations.c.instance_id.drop()
|
cd9c6f96f68391cf333b68e1ea28e513e869aca3
|
troposphere/detective.py
|
troposphere/detective.py
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class Graph(AWSObject):
resource_type = "AWS::Detective::Graph"
props = {
}
class MemberInvitation(AWSObject):
resource_type = "AWS::Detective::MemberInvitation"
props = {
'GraphArn': (basestring, True),
'MemberEmailAddress': (basestring, True),
'MemberId': (basestring, True),
'Message': (basestring, False),
}
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Graph(AWSObject):
resource_type = "AWS::Detective::Graph"
props = {
}
class MemberInvitation(AWSObject):
resource_type = "AWS::Detective::MemberInvitation"
props = {
'DisableEmailNotification': (boolean, False),
'GraphArn': (basestring, True),
'MemberEmailAddress': (basestring, True),
'MemberId': (basestring, True),
'Message': (basestring, False),
}
|
Update Detective per 2021-03-15 changes
|
Update Detective per 2021-03-15 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
---
+++
@@ -4,6 +4,7 @@
# See LICENSE file for full license.
from . import AWSObject
+from .validators import boolean
class Graph(AWSObject):
@@ -17,6 +18,7 @@
resource_type = "AWS::Detective::MemberInvitation"
props = {
+ 'DisableEmailNotification': (boolean, False),
'GraphArn': (basestring, True),
'MemberEmailAddress': (basestring, True),
'MemberId': (basestring, True),
|
08a1bfc233dc44f40ee0b8a5950eda19bca1d6a6
|
quran_tafseer/serializers.py
|
quran_tafseer/serializers.py
|
from django.urls import reverse
from rest_framework import serializers
from .models import Tafseer, TafseerText
class TafseerSerializer(serializers.ModelSerializer):
class Meta:
model = Tafseer
fields = ['id', 'name']
class TafseerTextSerializer(serializers.ModelSerializer):
tafseer_id = serializers.IntegerField(source='tafseer.id')
tafseer_name = serializers.CharField(source='tafseer.name')
ayah_url = serializers.SerializerMethodField()
def get_ayah_url(self, obj):
return reverse('ayah-detail', kwargs={'number': obj.ayah.number,
'sura_num': obj.ayah.sura.pk})
class Meta:
model = TafseerText
fields = ['tafseer_id', 'tafseer_name', 'ayah_url', 'ayah', 'text']
|
from django.urls import reverse
from rest_framework import serializers
from .models import Tafseer, TafseerText
class TafseerSerializer(serializers.ModelSerializer):
class Meta:
model = Tafseer
fields = ['id', 'name']
class TafseerTextSerializer(serializers.ModelSerializer):
tafseer_id = serializers.IntegerField(source='tafseer.id')
tafseer_name = serializers.CharField(source='tafseer.name')
ayah_url = serializers.SerializerMethodField()
ayah_number = serializers.IntegerField(source='ayah')
def get_ayah_url(self, obj):
return reverse('ayah-detail', kwargs={'number': obj.ayah.number,
'sura_num': obj.ayah.sura.pk})
class Meta:
model = TafseerText
fields = ['tafseer_id', 'tafseer_name', 'ayah_url', 'ayah_number', 'text']
|
Change serializer field name from ayah to ayah_number
|
Change serializer field name from ayah to ayah_number
|
Python
|
mit
|
EmadMokhtar/tafseer_api
|
---
+++
@@ -16,6 +16,7 @@
tafseer_id = serializers.IntegerField(source='tafseer.id')
tafseer_name = serializers.CharField(source='tafseer.name')
ayah_url = serializers.SerializerMethodField()
+ ayah_number = serializers.IntegerField(source='ayah')
def get_ayah_url(self, obj):
return reverse('ayah-detail', kwargs={'number': obj.ayah.number,
@@ -23,4 +24,4 @@
class Meta:
model = TafseerText
- fields = ['tafseer_id', 'tafseer_name', 'ayah_url', 'ayah', 'text']
+ fields = ['tafseer_id', 'tafseer_name', 'ayah_url', 'ayah_number', 'text']
|
efb636d392dab44fcc110b8d17933476ba90996d
|
Donut/__init__.py
|
Donut/__init__.py
|
import flask
import sqlalchemy
from Donut import config, constants
from Donut.modules import example
app = flask.Flask(__name__)
app.debug = False
app.secret_key = config.SECRET_KEY
# Maximum file upload size, in bytes.
app.config['MAX_CONTENT_LENGTH'] = constants.MAX_CONTENT_LENGTH
# Load blueprint modules
app.register_blueprint(example.blueprint, url_prefix='/example')
# Create database engine object.
# TODO##DatabaseWork: We currently don't have a database set up, so we can't
# reference sqlalchemy yet. However, it serves as a good example implementation.
# engine = sqlalchemy.create_engine(config.DB_URI, convert_unicode=True)
@app.before_request
def before_request():
"""Logic executed before request is processed."""
# TODO#DatabaseWork uncomment this line
# flask.g.db = engine.connect()
@app.teardown_request
def teardown_request(exception):
"""Logic executed after every request is finished."""
# TODO#DatabaseWork uncomment these lines
# if flask.g.db != None:
# flask.g.db.close()
# After initialization, import the routes.
from Donut import routes
|
import flask
import sqlalchemy
import os
from Donut import constants
from Donut.modules import example
app = flask.Flask(__name__)
app.debug = False
# Get app config, if we're not testing on travis.
if 'TRAVIS' not in os.environ:
app.config.from_object('Donut.config')
# Maximum file upload size, in bytes.
app.config['MAX_CONTENT_LENGTH'] = constants.MAX_CONTENT_LENGTH
# Load blueprint modules
app.register_blueprint(example.blueprint, url_prefix='/example')
# Create database engine object.
# TODO##DatabaseWork: We currently don't have a database set up, so we can't
# reference sqlalchemy yet. However, it serves as a good example implementation.
# engine = sqlalchemy.create_engine(app.config['DB_URI'], convert_unicode=True)
@app.before_request
def before_request():
"""Logic executed before request is processed."""
# TODO#DatabaseWork uncomment this line
# flask.g.db = engine.connect()
@app.teardown_request
def teardown_request(exception):
"""Logic executed after every request is finished."""
# TODO#DatabaseWork uncomment these lines
# if flask.g.db != None:
# flask.g.db.close()
# After initialization, import the routes.
from Donut import routes
|
Change how we get config settings
|
Change how we get config settings
- Flask recommends using `app.config.from_object` to get config settings.
- Don't get config settings if we're using travis, since it won't have the
settings from the repo.
|
Python
|
mit
|
ASCIT/donut,ASCIT/donut-python,ASCIT/donut,ASCIT/donut-python,ASCIT/donut
|
---
+++
@@ -1,12 +1,16 @@
import flask
import sqlalchemy
+import os
-from Donut import config, constants
+from Donut import constants
from Donut.modules import example
app = flask.Flask(__name__)
app.debug = False
-app.secret_key = config.SECRET_KEY
+
+# Get app config, if we're not testing on travis.
+if 'TRAVIS' not in os.environ:
+ app.config.from_object('Donut.config')
# Maximum file upload size, in bytes.
app.config['MAX_CONTENT_LENGTH'] = constants.MAX_CONTENT_LENGTH
@@ -17,7 +21,7 @@
# Create database engine object.
# TODO##DatabaseWork: We currently don't have a database set up, so we can't
# reference sqlalchemy yet. However, it serves as a good example implementation.
-# engine = sqlalchemy.create_engine(config.DB_URI, convert_unicode=True)
+# engine = sqlalchemy.create_engine(app.config['DB_URI'], convert_unicode=True)
@app.before_request
def before_request():
|
31c79697db0d5c973cff9b845ed28845695ecb02
|
website/addons/twofactor/views.py
|
website/addons/twofactor/views.py
|
# -*- coding: utf-8 -*-
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
|
# -*- coding: utf-8 -*-
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
user_addon.save()
return {'message': 'Successfully verified two-factor authentication.'}, http.OK
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
|
Fix response when user successfully confirms 2fa
|
Fix response when user successfully confirms 2fa
|
Python
|
apache-2.0
|
CenterForOpenScience/osf.io,revanthkolli/osf.io,jnayak1/osf.io,mluo613/osf.io,caneruguz/osf.io,alexschiller/osf.io,adlius/osf.io,himanshuo/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,jeffreyliu3230/osf.io,cslzchen/osf.io,cwisecarver/osf.io,billyhunt/osf.io,SSJohns/osf.io,cosenal/osf.io,GageGaskins/osf.io,kushG/osf.io,zamattiac/osf.io,njantrania/osf.io,petermalcolm/osf.io,jinluyuan/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,chennan47/osf.io,haoyuchen1992/osf.io,cwisecarver/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,wearpants/osf.io,binoculars/osf.io,jolene-esposito/osf.io,billyhunt/osf.io,reinaH/osf.io,kushG/osf.io,GageGaskins/osf.io,kwierman/osf.io,leb2dg/osf.io,felliott/osf.io,revanthkolli/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,Nesiehr/osf.io,acshi/osf.io,barbour-em/osf.io,doublebits/osf.io,acshi/osf.io,zkraime/osf.io,TomBaxter/osf.io,kushG/osf.io,baylee-d/osf.io,adlius/osf.io,cosenal/osf.io,jolene-esposito/osf.io,kushG/osf.io,njantrania/osf.io,DanielSBrown/osf.io,sbt9uc/osf.io,adlius/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,zachjanicki/osf.io,SSJohns/osf.io,TomBaxter/osf.io,cldershem/osf.io,reinaH/osf.io,revanthkolli/osf.io,zachjanicki/osf.io,sloria/osf.io,felliott/osf.io,hmoco/osf.io,hmoco/osf.io,brianjgeiger/osf.io,HarryRybacki/osf.io,danielneis/osf.io,emetsger/osf.io,chennan47/osf.io,icereval/osf.io,RomanZWang/osf.io,jeffreyliu3230/osf.io,kch8qx/osf.io,chennan47/osf.io,binoculars/osf.io,KAsante95/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,cwisecarver/osf.io,AndrewSallans/osf.io,mluo613/osf.io,alexschiller/osf.io,fabianvf/osf.io,alexschiller/osf.io,mluke93/osf.io,abought/osf.io,MerlinZhang/osf.io,zachjanicki/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,dplorimer/osf,cosenal/osf.io,Nesiehr/osf.io,mluo613/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,arpitar/osf.io,cwisecarver/osf.io,lamdnhan/osf.io,arpitar/osf.io,ckc6cz/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,wearpa
nts/osf.io,alexschiller/osf.io,TomBaxter/osf.io,icereval/osf.io,doublebits/osf.io,crcresearch/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,dplorimer/osf,GaryKriebel/osf.io,doublebits/osf.io,samchrisinger/osf.io,fabianvf/osf.io,wearpants/osf.io,jinluyuan/osf.io,chrisseto/osf.io,samanehsan/osf.io,mluke93/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,laurenrevere/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,cslzchen/osf.io,ckc6cz/osf.io,pattisdr/osf.io,brandonPurvis/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,sloria/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,arpitar/osf.io,aaxelb/osf.io,KAsante95/osf.io,amyshi188/osf.io,HarryRybacki/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,cosenal/osf.io,jinluyuan/osf.io,zamattiac/osf.io,abought/osf.io,KAsante95/osf.io,rdhyee/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,acshi/osf.io,sbt9uc/osf.io,chrisseto/osf.io,felliott/osf.io,Johnetordoff/osf.io,DanielSBrown/osf.io,emetsger/osf.io,caseyrygt/osf.io,caseyrollins/osf.io,TomHeatwole/osf.io,lamdnhan/osf.io,lyndsysimon/osf.io,crcresearch/osf.io,zkraime/osf.io,mluo613/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,erinspace/osf.io,jnayak1/osf.io,HarryRybacki/osf.io,mluke93/osf.io,felliott/osf.io,mfraezz/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,chrisseto/osf.io,Nesiehr/osf.io,SSJohns/osf.io,dplorimer/osf,jmcarp/osf.io,cslzchen/osf.io,crcresearch/osf.io,samchrisinger/osf.io,revanthkolli/osf.io,bdyetton/prettychart,abought/osf.io,erinspace/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,lyndsysimon/osf.io,cslzchen/osf.io,reinaH/osf.io,wearpants/osf.io,zkraime/osf.io,lamdnhan/osf.io,jeffreyliu3230/osf.io,RomanZWang/osf.io,lyndsysimon/osf.io,caneruguz/osf.io,GageGaskins/osf.io,mfraezz/osf.io,pattisdr/osf.io,baylee-d/osf.io,Ghalko/osf.io,GaryKriebel/osf.io,chrisseto/osf.io,Ghalko/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,acshi/osf.io,cldershem/osf.i
o,rdhyee/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,arpitar/osf.io,baylee-d/osf.io,samanehsan/osf.io,rdhyee/osf.io,acshi/osf.io,mfraezz/osf.io,njantrania/osf.io,monikagrabowska/osf.io,fabianvf/osf.io,mattclark/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,saradbowman/osf.io,GaryKriebel/osf.io,fabianvf/osf.io,abought/osf.io,kwierman/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,barbour-em/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,SSJohns/osf.io,KAsante95/osf.io,dplorimer/osf,himanshuo/osf.io,jnayak1/osf.io,barbour-em/osf.io,cldershem/osf.io,sloria/osf.io,HalcyonChimera/osf.io,sbt9uc/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,leb2dg/osf.io,mfraezz/osf.io,jmcarp/osf.io,AndrewSallans/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,petermalcolm/osf.io,reinaH/osf.io,erinspace/osf.io,mluo613/osf.io,zachjanicki/osf.io,jolene-esposito/osf.io,laurenrevere/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,ckc6cz/osf.io,hmoco/osf.io,zkraime/osf.io,doublebits/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,himanshuo/osf.io,jmcarp/osf.io,emetsger/osf.io,himanshuo/osf.io,lamdnhan/osf.io,icereval/osf.io,asanfilippo7/osf.io,doublebits/osf.io,bdyetton/prettychart,njantrania/osf.io,hmoco/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,lyndsysimon/osf.io,asanfilippo7/osf.io,aaxelb/osf.io,samanehsan/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,adlius/osf.io,kch8qx/osf.io,emetsger/osf.io,danielneis/osf.io,mluke93/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,amyshi188/osf.io,ticklemepierce/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,kwierman/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,danielneis/osf.io,petermalcolm/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,Ghalko/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,billyhunt/osf.io,bdyetton/prettychart,zamattiac/osf.io
|
---
+++
@@ -18,6 +18,8 @@
if user_addon.verify_code(code):
user_addon.is_confirmed = True
+ user_addon.save()
+ return {'message': 'Successfully verified two-factor authentication.'}, http.OK
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
|
8ce2cbff7063b97e249dd87dd2acb8d83bdbf509
|
examples/py/fetch-ohlcv-sequentially.py
|
examples/py/fetch-ohlcv-sequentially.py
|
# -*- coding: utf-8 -*-
import os
import sys
import time
# -----------------------------------------------------------------------------
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root)
# -----------------------------------------------------------------------------
import ccxt # noqa: E402
# -----------------------------------------------------------------------------
# common constants
msec = 1000
minute = 60 * msec
# -----------------------------------------------------------------------------
kraken = ccxt.kraken()
# -----------------------------------------------------------------------------
from_datetime = '2017-09-01 00:00:00'
from_timestamp = kraken.parse8601(from_datetime)
# -----------------------------------------------------------------------------
now = kraken.milliseconds()
# -----------------------------------------------------------------------------
while from_timestamp < now:
print('Fetching candles starting from', kraken.iso8601(from_timestamp))
ohlcvs = kraken.fetch_ohlcv('BTC/USD', '1m', from_timestamp)
# don't hit the rateLimit or you will be banned
time.sleep(kraken.rateLimit / msec)
# Kraken returns 720 candles for 1m timeframe at once
from_timestamp += len(ohlcvs) * minute
|
# -*- coding: utf-8 -*-
import os
import sys
import time
# -----------------------------------------------------------------------------
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root)
# -----------------------------------------------------------------------------
import ccxt # noqa: E402
# -----------------------------------------------------------------------------
# common constants
msec = 1000
minute = 60 * msec
# -----------------------------------------------------------------------------
kraken = ccxt.kraken()
# -----------------------------------------------------------------------------
from_datetime = '2017-09-01 00:00:00'
from_timestamp = kraken.parse8601(from_datetime)
# -----------------------------------------------------------------------------
now = kraken.milliseconds()
# -----------------------------------------------------------------------------
data = []
while from_timestamp < now:
print('Fetching candles starting from', kraken.iso8601(from_timestamp))
ohlcvs = kraken.fetch_ohlcv('BTC/USD', '1m', from_timestamp)
# don't hit the rateLimit or you will be banned
time.sleep(3 * kraken.rateLimit / msec)
# Kraken returns 720 candles for 1m timeframe at once
from_timestamp += len(ohlcvs) * minute
data += ohlcvs
|
Add delay 3 times the rate limit to avoid API errors
|
Add delay 3 times the rate limit to avoid API errors
|
Python
|
mit
|
tritoanst/ccxt,tritoanst/ccxt,ccxt/ccxt,ccxt/ccxt,tritoanst/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,tritoanst/ccxt
|
---
+++
@@ -34,6 +34,7 @@
# -----------------------------------------------------------------------------
+data = []
while from_timestamp < now:
print('Fetching candles starting from', kraken.iso8601(from_timestamp))
@@ -41,7 +42,9 @@
ohlcvs = kraken.fetch_ohlcv('BTC/USD', '1m', from_timestamp)
# don't hit the rateLimit or you will be banned
- time.sleep(kraken.rateLimit / msec)
+ time.sleep(3 * kraken.rateLimit / msec)
# Kraken returns 720 candles for 1m timeframe at once
from_timestamp += len(ohlcvs) * minute
+
+ data += ohlcvs
|
6e6c5bb9f02a4f9d380ee20216f710a6de0b0583
|
fenum.py
|
fenum.py
|
#!/bin/env python
import os
import sys
def main():
if len(sys.argv) == 1 or sys.argv[1].lower() == "-h" or sys.argv[1].lower() == "--help":
print("Syntax: fenum.py [files...]")
print("\tEnumerate the given files (starting at 1) in the same order as they are passed to the script.")
return
for k,v in enumerate(sys.argv[1:], 1):
path, name = os.path.split(v if not v.endswith("/") else v[:-1])
if path:
path += "/"
try:
fname = "{}{} - {}".format(path, str(k).zfill(len(str(len(sys.argv) - 1))), name)
print("\"{}\" -> \"{}\"".format(v, fname))
os.rename(v, fname)
except Exception as e:
print(str(e))
main()
|
#!/bin/env python
import os
import sys
import math
def main():
if len(sys.argv) == 1 or sys.argv[1].lower() == "-h" or sys.argv[1].lower() == "--help":
print("Syntax: fenum.py [files...]")
print("\tEnumerate the given files (starting at 1) in the same order as they are passed to the script.")
return
for k,v in enumerate(sys.argv[1:], 1):
path, name = os.path.split(v if not v.endswith("/") else v[:-1])
if path:
path += "/"
try:
fname = "{}{} - {}".format(
path,
str(k).zfill(int(math.log10(len(sys.argv) - 1)) + 1),
name)
print("\"{}\" -> \"{}\"".format(v, fname))
os.rename(v, fname)
except Exception as e:
print(str(e))
main()
|
Use log10 to get the amount of digits
|
Use log10 to get the amount of digits
|
Python
|
mit
|
mphe/scripts,mphe/scripts,mall0c/scripts,mall0c/scripts
|
---
+++
@@ -2,6 +2,7 @@
import os
import sys
+import math
def main():
if len(sys.argv) == 1 or sys.argv[1].lower() == "-h" or sys.argv[1].lower() == "--help":
@@ -14,7 +15,10 @@
if path:
path += "/"
try:
- fname = "{}{} - {}".format(path, str(k).zfill(len(str(len(sys.argv) - 1))), name)
+ fname = "{}{} - {}".format(
+ path,
+ str(k).zfill(int(math.log10(len(sys.argv) - 1)) + 1),
+ name)
print("\"{}\" -> \"{}\"".format(v, fname))
os.rename(v, fname)
except Exception as e:
|
1c58cdadb93180557275682fa34433de2b736445
|
forms.py
|
forms.py
|
from flask.ext.wtf import Form
from wtforms import SelectField, BooleanField, IntegerField, TextField, \
validators
# TODO add vailidation
class TeamForm(Form):
number = IntegerField("Number", [validators.Required(),
validators.NumberRange(min=1, max=99999)])
name = TextField("Name", [validators.Required(),
validators.Length(min=1, max=50)])
affiliation = TextField("Affiliation", [validators.Length(min=1, max=200)])
city = TextField("City", [validators.Length(min=1, max=50)])
state = TextField("State", [validators.Length(min=2, max=2)])
class ScoreForm(Form):
team_id = SelectField(u'Team', coerce=int)
tree_branch_is_closer = BooleanField(default=False)
tree_branch_is_intact = BooleanField(default=False)
cargo_plane_location = SelectField(choices=[('0', 'None'),
('1', 'Yellow only'),
('2', 'Light blue')])
|
from flask.ext.wtf import Form
from wtforms import SelectField, BooleanField, IntegerField, TextField, \
validators
class TeamForm(Form):
number = IntegerField("Number", [validators.Required(),
validators.NumberRange(min=1, max=99999)])
name = TextField("Name", [validators.Required(),
validators.Length(min=1, max=50)])
affiliation = TextField("Affiliation", [validators.Length(min=1, max=200)])
city = TextField("City", [validators.Length(min=1, max=50)])
state = TextField("State", [validators.Length(min=2, max=2)])
# TODO add validation
class ScoreForm(Form):
team_id = SelectField(u'Team', coerce=int)
tree_branch_is_closer = BooleanField(default=False)
tree_branch_is_intact = BooleanField(default=False)
cargo_plane_location = SelectField(choices=[('0', 'None'),
('1', 'Yellow only'),
('2', 'Light blue')])
|
Move TODO tag to correct class
|
Move TODO tag to correct class
|
Python
|
mit
|
rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy
|
---
+++
@@ -3,7 +3,6 @@
validators
-# TODO add vailidation
class TeamForm(Form):
number = IntegerField("Number", [validators.Required(),
validators.NumberRange(min=1, max=99999)])
@@ -14,6 +13,7 @@
state = TextField("State", [validators.Length(min=2, max=2)])
+# TODO add validation
class ScoreForm(Form):
team_id = SelectField(u'Team', coerce=int)
tree_branch_is_closer = BooleanField(default=False)
|
c1008646ad8eac1de30d0c1bfd95caebd66f7fa1
|
pax/plugins/peak_processing/ClassifyPeaks.py
|
pax/plugins/peak_processing/ClassifyPeaks.py
|
from pax import plugin, units
class AdHocClassification(plugin.TransformPlugin):
def transform_event(self, event):
for peak in event.peaks:
# Don't work on noise and lone_hit
if peak.type in ('unknown', 'lone_hit'):
continue
if peak.range_90p_area < 150 * units.ns:
peak.type = 's1'
elif peak.range_90p_area > 200 * units.ns:
if peak.area > 5:
peak.type = 's2'
else:
peak.type = 'coincidence'
return event
|
from pax import plugin, units
class AdHocClassification(plugin.TransformPlugin):
def transform_event(self, event):
for peak in event.peaks:
# Don't work on noise and lone_hit
if peak.type in ('noise', 'lone_hit'):
continue
if peak.range_90p_area < 150 * units.ns:
peak.type = 's1'
elif peak.range_90p_area > 200 * units.ns:
if peak.area > 5:
peak.type = 's2'
else:
peak.type = 'coincidence'
return event
|
Fix goof in classification fix
|
Fix goof in classification fix
|
Python
|
bsd-3-clause
|
XENON1T/pax,XENON1T/pax
|
---
+++
@@ -8,7 +8,7 @@
for peak in event.peaks:
# Don't work on noise and lone_hit
- if peak.type in ('unknown', 'lone_hit'):
+ if peak.type in ('noise', 'lone_hit'):
continue
if peak.range_90p_area < 150 * units.ns:
|
9e42514ac030cc2fa3aab89addfa48cf0342f991
|
kremlin/utils.py
|
kremlin/utils.py
|
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
""" Create a thumbnail for an image
fp filesystem path to the full size image
h height (default is 128)
w width (default is 128)
The thumbnail will be unceremoniously dumped in the same
directory with 'thumbnail' between the file name and extension.
"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save('.thumbnail'.join([f, ext]))
|
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
""" Create a thumbnail for an image
fp filesystem path to the full size image
h height (default is 128)
w width (default is 128)
The thumbnail will be unceremoniously dumped in the same
directory with 'thumbnail' between the file name and extension.
"""
size = (h, w)
f, ext = os.path.splitext(fp)
with Image.open(fp) as im:
im.thumbnail(size, Image.ANTIALIAS)
im.save('.thumbnail'.join([f, ext]))
|
Use context manager for PIL Open
|
Use context manager for PIL Open
This is currently kind of buggy with Python3. A harmless warning is
emitted since the handle is still closed late inside PIL/Pillow.
|
Python
|
bsd-2-clause
|
glasnost/kremlin,glasnost/kremlin,glasnost/kremlin
|
---
+++
@@ -28,6 +28,6 @@
size = (h, w)
f, ext = os.path.splitext(fp)
- im = Image.open(fp)
- im.thumbnail(size, Image.ANTIALIAS)
- im.save('.thumbnail'.join([f, ext]))
+ with Image.open(fp) as im:
+ im.thumbnail(size, Image.ANTIALIAS)
+ im.save('.thumbnail'.join([f, ext]))
|
12d2e8033c46f06f5442cba40a7c2c673196ad1c
|
akanda/horizon/routers/views.py
|
akanda/horizon/routers/views.py
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
ports = [api.quantum.Port(p) for p in router.ports]
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Right now we are listing, for both normal and
# admin users, all the ports on the user's networks
# the router is associated with. We may want in the
# future show the ports on the mgt and the external
# networks for the admin users.
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] == 'network:router_interface']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
Fix the router's interfaces listing view to show only the interfaces on the user's networks filtering out interfaces on the mgt and public networks.
|
Fix the router's interfaces listing view to show only the
interfaces on the user's networks filtering out interfaces
on the mgt and public networks.
DHC-1512
Change-Id: I9b68b75d5e8325c4c70090fa500a417e23b1836f
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>
|
Python
|
apache-2.0
|
dreamhost/akanda-horizon,dreamhost/akanda-horizon
|
---
+++
@@ -8,7 +8,13 @@
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
- ports = [api.quantum.Port(p) for p in router.ports]
+ # Note(rods): Right now we are listing, for both normal and
+ # admin users, all the ports on the user's networks
+ # the router is associated with. We may want in the
+ # future show the ports on the mgt and the external
+ # networks for the admin users.
+ ports = [api.quantum.Port(p) for p in router.ports
+ if p['device_owner'] == 'network:router_interface']
except Exception:
ports = []
msg = _(
|
a2530b9cd2baf70591e377b6eb7d5104493989a1
|
test/conftest.py
|
test/conftest.py
|
def pytest_addoption(parser):
parser.addoption("--domain", action="append", default=[],
help="list of stringinputs to pass to test functions")
def pytest_generate_tests(metafunc):
if 'domain' in metafunc.fixturenames:
metafunc.parametrize("domain",
metafunc.config.option.domain)
|
def pytest_addoption(parser):
parser.addoption("--domain", action="append", default=[],
help="list of stringinputs to pass to test functions")
parser.addoption("--url", action="append", default=[],
help="list of stringinputs to pass to test functions")
def pytest_generate_tests(metafunc):
if 'domain' in metafunc.fixturenames:
metafunc.parametrize("domain",
metafunc.config.option.domain)
if 'url' in metafunc.fixturenames:
metafunc.parametrize("url",
metafunc.config.option.url)
|
Add test parameter for site url
|
Add test parameter for site url
|
Python
|
mit
|
HIIT/mediacollection
|
---
+++
@@ -1,9 +1,13 @@
def pytest_addoption(parser):
parser.addoption("--domain", action="append", default=[],
+ help="list of stringinputs to pass to test functions")
+ parser.addoption("--url", action="append", default=[],
help="list of stringinputs to pass to test functions")
def pytest_generate_tests(metafunc):
if 'domain' in metafunc.fixturenames:
metafunc.parametrize("domain",
metafunc.config.option.domain)
-
+ if 'url' in metafunc.fixturenames:
+ metafunc.parametrize("url",
+ metafunc.config.option.url)
|
e0af42d5431c42ee5e12fb228978ac8ce9c62807
|
server/modules/persistent.py
|
server/modules/persistent.py
|
#!/usr/bin/python3
import argparse
import os
import icon_lib
parser = argparse.ArgumentParser(description='iconograph persistent')
parser.add_argument(
'--chroot-path',
dest='chroot_path',
action='store',
required=True)
FLAGS = parser.parse_args()
def main():
module = icon_lib.IconModule(FLAGS.chroot_path)
os.mkdir(os.path.join(FLAGS.chroot_path, 'persistent'))
tool_path = os.path.join(FLAGS.chroot_path, 'icon', 'persistent')
os.makedirs(tool_path, exist_ok=True)
script = os.path.join(tool_path, 'startup.sh')
with open(script, 'w') as fh:
os.chmod(fh.fileno(), 0o755)
fh.write("""\
#!/bin/bash
set -ex
e2fsck -a /persistent
mount -o data=journal,noatime,sync LABEL=PERSISTENT /persistent
""")
with module.ServiceFile('persistent.service') as fh:
fh.write("""
[Unit]
Description=Mount /persistent
DefaultDependencies=no
Conflicts=shutdown.target
After=systemd-remount-fs.service
Before=sysinit.target
[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=/icon/persistent/startup.sh
[Install]
WantedBy=sysinit.target
""")
module.EnableService('persistent.service')
if __name__ == '__main__':
main()
|
#!/usr/bin/python3
import argparse
import os
import icon_lib
parser = argparse.ArgumentParser(description='iconograph persistent')
parser.add_argument(
'--chroot-path',
dest='chroot_path',
action='store',
required=True)
FLAGS = parser.parse_args()
def main():
module = icon_lib.IconModule(FLAGS.chroot_path)
os.mkdir(os.path.join(FLAGS.chroot_path, 'persistent'))
tool_path = os.path.join(FLAGS.chroot_path, 'icon', 'persistent')
os.makedirs(tool_path, exist_ok=True)
script = os.path.join(tool_path, 'startup.sh')
with open(script, 'w') as fh:
os.chmod(fh.fileno(), 0o755)
fh.write("""\
#!/bin/bash
set -ex
e2fsck -a /persistent
mount -o noatime LABEL=PERSISTENT /persistent
""")
with module.ServiceFile('persistent.service') as fh:
fh.write("""
[Unit]
Description=Mount /persistent
DefaultDependencies=no
Conflicts=shutdown.target
After=systemd-remount-fs.service
Before=sysinit.target
[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=/icon/persistent/startup.sh
[Install]
WantedBy=sysinit.target
""")
module.EnableService('persistent.service')
if __name__ == '__main__':
main()
|
Remove flags that drastically slow down writes
|
Remove flags that drastically slow down writes
|
Python
|
apache-2.0
|
robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph
|
---
+++
@@ -30,7 +30,7 @@
#!/bin/bash
set -ex
e2fsck -a /persistent
-mount -o data=journal,noatime,sync LABEL=PERSISTENT /persistent
+mount -o noatime LABEL=PERSISTENT /persistent
""")
with module.ServiceFile('persistent.service') as fh:
|
ec2092c683f721e32a2d1d9792f296e140d6ba45
|
paperwork_parser/exceptions.py
|
paperwork_parser/exceptions.py
|
class InvalidPDFError(ValueError):
pass
class FieldParseError(Exception):
pass
|
class InvalidPDFError(ValueError):
pass
class UnknownVariantError(Exception):
pass
class FieldParseError(Exception):
pass
|
Add new exception type for unknown variants
|
Add new exception type for unknown variants
|
Python
|
mit
|
loanzen/zen_document_parser
|
---
+++
@@ -3,5 +3,9 @@
pass
+class UnknownVariantError(Exception):
+ pass
+
+
class FieldParseError(Exception):
pass
|
0b49114a6b0830fa0b05d32803ae52526b8e48ca
|
gnsq/backofftimer.py
|
gnsq/backofftimer.py
|
from random import randint
class BackoffTimer(object):
def __init__(self, ratio=1, max_interval=None, min_interval=None):
self.c = 0
self.ratio = ratio
self.max_interval = max_interval
self.min_interval = min_interval
def is_reset(self):
return self.c == 0
def reset(self):
self.c = 0
return self
def success(self):
self.c = max(self.c - 1, 0)
return self
def failure(self):
self.c += 1
return self
def get_interval(self):
k = pow(2, self.c) - 1
interval = randint(0, k) * self.ratio
if self.max_interval is not None:
interval = min(interval, self.max_interval)
if self.min_interval is not None:
interval = max(interval, self.min_interval)
return interval
|
import random
class BackoffTimer(object):
def __init__(self, ratio=1, max_interval=None, min_interval=None):
self.c = 0
self.ratio = ratio
self.max_interval = max_interval
self.min_interval = min_interval
def is_reset(self):
return self.c == 0
def reset(self):
self.c = 0
return self
def success(self):
self.c = max(self.c - 1, 0)
return self
def failure(self):
self.c += 1
return self
def get_interval(self):
k = pow(2, self.c) - 1
interval = random.random() * k * self.ratio
if self.max_interval is not None:
interval = min(interval, self.max_interval)
if self.min_interval is not None:
interval = max(interval, self.min_interval)
return interval
|
Return float for interval instead of int.
|
Return float for interval instead of int.
|
Python
|
bsd-3-clause
|
hiringsolved/gnsq,wtolson/gnsq,wtolson/gnsq
|
---
+++
@@ -1,4 +1,4 @@
-from random import randint
+import random
class BackoffTimer(object):
@@ -26,7 +26,7 @@
def get_interval(self):
k = pow(2, self.c) - 1
- interval = randint(0, k) * self.ratio
+ interval = random.random() * k * self.ratio
if self.max_interval is not None:
interval = min(interval, self.max_interval)
|
1d74b003818e260ae1f453cb26f1c9efc29e8ba2
|
scripts/run_unit_test.py
|
scripts/run_unit_test.py
|
#!/usr/bin/env python
import serial
import os
import sys
import time
# Make and flash the unit test
FILE_LOCATION = os.path.dirname(os.path.abspath(__file__))
os.chdir(FILE_LOCATION + "/../")
print os.system("make flash_unit_test")
# Ask the user to reset the board
raw_input("\nPlease press the phsyical reset button on the STM32F4Discovery board and then press enter to continue...")
# Open a serial port
time.sleep(1)
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
# time.sleep(1)
# Send data to start USB OTG
print 'Write start'
ser.write("start")
print 'Run test'
# Read until we see the finished text
result = ''
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
except:
print '\nFailed to read'
sys.stdout.write(new)
result += new
if result.find("Finished") != -1:
break
finally:
# Print the result so the user can see and close the serial port
#print result
ser.close()
|
#!/usr/bin/env python
import serial
import os
import sys
import time
# Make and flash the unit test
FILE_LOCATION = os.path.dirname(os.path.abspath(__file__))
os.chdir(FILE_LOCATION + "/../")
print os.system("make flash_unit_test")
# Ask the user to reset the board
raw_input("""\nPlease press the phsyical reset button on
the STM32F4Discovery board and then press enter to continue...""")
# Open a serial port
time.sleep(1)
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
# time.sleep(1)
# Send data to start USB OTG
print 'Write start'
ser.write("start")
print 'Run test'
# Read until we see the finished text
result = ''
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
except:
print '\nFailed to read'
sys.stdout.write(new)
result += new
if result.find("Finished") != -1:
break
finally:
# Close the serial port
ser.close()
|
Bring scripts folder up to pep8 standards
|
PEP8: Bring scripts folder up to pep8 standards
|
Python
|
mit
|
fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project
|
---
+++
@@ -6,12 +6,13 @@
import time
# Make and flash the unit test
-FILE_LOCATION = os.path.dirname(os.path.abspath(__file__))
+FILE_LOCATION = os.path.dirname(os.path.abspath(__file__))
os.chdir(FILE_LOCATION + "/../")
print os.system("make flash_unit_test")
# Ask the user to reset the board
-raw_input("\nPlease press the phsyical reset button on the STM32F4Discovery board and then press enter to continue...")
+raw_input("""\nPlease press the phsyical reset button on
+ the STM32F4Discovery board and then press enter to continue...""")
# Open a serial port
time.sleep(1)
@@ -40,6 +41,5 @@
if result.find("Finished") != -1:
break
finally:
- # Print the result so the user can see and close the serial port
- #print result
+ # Close the serial port
ser.close()
|
515855105be9d26ad1a272177a64604e0fa547a5
|
wait_for_agent_update.py
|
wait_for_agent_update.py
|
#!/usr/bin/env python
__metaclass__ = type
from jujupy import (
check_wordpress,
Environment,
format_listing,
until_timeout,
)
from collections import defaultdict
import sys
def agent_update(environment, version):
env = Environment(environment)
for ignored in until_timeout(30):
versions = defaultdict(list)
status = env.get_status()
for item_name, item in env.agent_items(status):
versions[item.get('agent-version', 'unknown')].append(item_name)
if versions.keys() == [version]:
break
print format_listing(versions, version)
sys.stdout.flush()
else:
raise Exception('Some versions did not update.')
def main():
try:
agent_update(sys.argv[1], sys.argv[2])
except Exception as e:
print e
sys.exit(1)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
__metaclass__ = type
from jujupy import (
check_wordpress,
Environment,
format_listing,
until_timeout,
)
from collections import defaultdict
import sys
def agent_update(environment, version):
env = Environment(environment)
for ignored in until_timeout(300):
versions = defaultdict(list)
status = env.get_status()
for item_name, item in env.agent_items(status):
versions[item.get('agent-version', 'unknown')].append(item_name)
if versions.keys() == [version]:
break
print format_listing(versions, version)
sys.stdout.flush()
else:
raise Exception('Some versions did not update.')
def main():
try:
agent_update(sys.argv[1], sys.argv[2])
except Exception as e:
print e
sys.exit(1)
if __name__ == '__main__':
main()
|
Increase timeout for agent update to 5 minutes.
|
Increase timeout for agent update to 5 minutes.
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
---
+++
@@ -13,7 +13,7 @@
def agent_update(environment, version):
env = Environment(environment)
- for ignored in until_timeout(30):
+ for ignored in until_timeout(300):
versions = defaultdict(list)
status = env.get_status()
for item_name, item in env.agent_items(status):
|
d498a9846567e4986ba2a2541b2b4e4719c2c83f
|
keras/__init__.py
|
keras/__init__.py
|
from __future__ import absolute_import
from . import activations
from . import applications
from . import backend
from . import datasets
from . import engine
from . import layers
from . import preprocessing
from . import utils
from . import wrappers
from . import callbacks
from . import constraints
from . import initializers
from . import metrics
from . import models
from . import losses
from . import optimizers
from . import regularizers
__version__ = '2.0.3'
|
from __future__ import absolute_import
from . import activations
from . import applications
from . import backend
from . import datasets
from . import engine
from . import layers
from . import preprocessing
from . import utils
from . import wrappers
from . import callbacks
from . import constraints
from . import initializers
from . import metrics
from . import models
from . import losses
from . import optimizers
from . import regularizers
# Importable from root because it's technically not a layer
from .layers import Input
__version__ = '2.0.3'
|
Make Input importable from root
|
Make Input importable from root
|
Python
|
apache-2.0
|
keras-team/keras,keras-team/keras
|
---
+++
@@ -17,5 +17,7 @@
from . import losses
from . import optimizers
from . import regularizers
+# Importable from root because it's technically not a layer
+from .layers import Input
__version__ = '2.0.3'
|
727b94078ab15d04a65adbd57fe6962949bda97e
|
openacademy/model/openacademy_course.py
|
openacademy/model/openacademy_course.py
|
from openerp import models, fields, api
class Course(models.Model):
'''
This class creates a model for courses
'''
_name = 'openacademey.course'
name = fields.Char(string='Title', required=True)
description = fields.Text(string='Description')
|
from openerp import models, fields, api
class Course(models.Model):
'''
This class creates a model for courses
'''
_name = 'openacademy.course'
name = fields.Char(string='Title', required=True)
description = fields.Text(string='Description')
|
Fix typo in Course table name
|
Fix typo in Course table name
|
Python
|
mit
|
tebanep/odoo_training_addons
|
---
+++
@@ -5,7 +5,7 @@
'''
This class creates a model for courses
'''
- _name = 'openacademey.course'
+ _name = 'openacademy.course'
name = fields.Char(string='Title', required=True)
description = fields.Text(string='Description')
|
88e99caf6c426cdee602157f9aee120ecf822cad
|
docs/conf.py
|
docs/conf.py
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
Set readthedocs theme in docs build
|
Set readthedocs theme in docs build
|
Python
|
bsd-3-clause
|
lamby/django-slack
|
---
+++
@@ -17,6 +17,7 @@
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
+html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
|
8e20b56c4b91f673429697232926115db46e0c2d
|
spanky/commands/cmd_users.py
|
spanky/commands/cmd_users.py
|
import click
from spanky.cli import pass_context
from spanky.lib.users import UserInit
@click.command('users', short_help='creates users base on /etc/spanky/users')
@pass_context
def cli(ctx):
config = ctx.config.load('users.yml')()
user_init = UserInit(config)
user_init.build()
|
import sys
import click
from spanky.cli import pass_context
from spanky.lib.users import UserInit
@click.command('users', short_help='creates users base on /etc/spanky/users')
@pass_context
def cli(ctx):
try:
config = ctx.config.load('users.yml')()
except IOError:
# no config lets bail
click.echo('No users to install')
sys.exit(1)
user_init = UserInit(config)
user_init.build()
|
Return a 1 when we don't have user to install.
|
Return a 1 when we don't have user to install.
TODO: Make better return code
|
Python
|
bsd-3-clause
|
pglbutt/spanky,pglbutt/spanky,pglbutt/spanky
|
---
+++
@@ -1,4 +1,7 @@
+import sys
+
import click
+
from spanky.cli import pass_context
from spanky.lib.users import UserInit
@@ -6,6 +9,12 @@
@click.command('users', short_help='creates users base on /etc/spanky/users')
@pass_context
def cli(ctx):
- config = ctx.config.load('users.yml')()
+ try:
+ config = ctx.config.load('users.yml')()
+ except IOError:
+ # no config lets bail
+ click.echo('No users to install')
+ sys.exit(1)
+
user_init = UserInit(config)
user_init.build()
|
9970faa24d9f27817c7e9c9e88869bb415d6926c
|
main/__init__.py
|
main/__init__.py
|
from flask import Flask
from config import config
from . import extensions, modules
def create_app(config_name):
config_obj = config[config_name]()
app = Flask(__name__, static_url_path='/static')
# Initializes configuration values.
app.config.from_object(config_obj)
# Configure SSL if the current platform supports it.
if not app.debug and not app.testing and not app.config.get('SSL_DISABLE'):
from flask_sslify import SSLify
SSLify(app)
# Initializes Flask extensions.
extensions.init_app(app)
# Initializes modules.
modules.init_app(app)
return app
|
from flask import Flask, redirect, request
from config import config
from . import extensions, modules
def create_app(config_name):
config_obj = config[config_name]()
app = Flask(__name__, static_url_path='/static')
# Initializes configuration values.
app.config.from_object(config_obj)
# Configure SSL if the current platform supports it.
if not app.debug and not app.testing and not app.config.get('SSL_DISABLE'):
from flask_sslify import SSLify
SSLify(app)
@app.before_request
def redirect_www():
""" Redirects www requests to non-www. """
if request.host.startswith('www.'):
new_host = request.host[4:]
return redirect(f"{request.scheme}://{new_host}/", code=301)
# Initializes Flask extensions.
extensions.init_app(app)
# Initializes modules.
modules.init_app(app)
return app
|
Add before_request callback to handle www redirects
|
Add before_request callback to handle www redirects
|
Python
|
mit
|
ellmetha/morganaubert-resume,ellmetha/morganaubert-resume,ellmetha/morganaubert-resume,ellmetha/morganaubert-resume
|
---
+++
@@ -1,4 +1,4 @@
-from flask import Flask
+from flask import Flask, redirect, request
from config import config
@@ -17,6 +17,13 @@
from flask_sslify import SSLify
SSLify(app)
+ @app.before_request
+ def redirect_www():
+ """ Redirects www requests to non-www. """
+ if request.host.startswith('www.'):
+ new_host = request.host[4:]
+ return redirect(f"{request.scheme}://{new_host}/", code=301)
+
# Initializes Flask extensions.
extensions.init_app(app)
|
d0e139d286b18c9dcdc8c46161c4ebdf0f0f8d96
|
examples/cooperative_binding.py
|
examples/cooperative_binding.py
|
import sys
import os
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.realpath(__file__)), '..'))
from crnpy.crn import CRN, from_react_file
__author__ = "Elisa Tonello"
__copyright__ = "Copyright (c) 2016, Elisa Tonello"
__license__ = "BSD"
__version__ = "0.0.1"
# Cooperative binding
print "Creating model..."
crn = from_react_file("data/reactions/cooperative_binding")
crn.inspect(True)
print
print("Remove ps1, ps2 and ps3 by qss")
crn.remove(qss = ['ps1', 'ps2', 'ps3'], debug = True)
for s, f in crn.removed_species: print(s + " = " + str(f))
crn.inspect(True)
|
import sys
import os
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.realpath(__file__)), '..'))
from crnpy.crn import CRN, from_react_file
__author__ = "Elisa Tonello"
__copyright__ = "Copyright (c) 2016, Elisa Tonello"
__license__ = "BSD"
__version__ = "0.0.1"
# Cooperative binding
print "Creating model..."
crn = from_react_file("data/reactions/cooperative_binding")
crn.inspect(True)
print("")
print("Remove ps1, ps2 and ps3 by qssa")
crn.remove(qss = ['ps1', 'ps2', 'ps3'])
for s, f in crn.removed_species: print(s + " = " + str(f))
crn.inspect(True)
|
Remove debug and adjusted print.
|
Remove debug and adjusted print.
|
Python
|
bsd-3-clause
|
etonello/crnpy
|
---
+++
@@ -16,9 +16,9 @@
crn = from_react_file("data/reactions/cooperative_binding")
crn.inspect(True)
-print
+print("")
-print("Remove ps1, ps2 and ps3 by qss")
-crn.remove(qss = ['ps1', 'ps2', 'ps3'], debug = True)
+print("Remove ps1, ps2 and ps3 by qssa")
+crn.remove(qss = ['ps1', 'ps2', 'ps3'])
for s, f in crn.removed_species: print(s + " = " + str(f))
crn.inspect(True)
|
67d3193683d2215fdd660bdc086801fe761c7db7
|
src/views.py
|
src/views.py
|
from flask import render_template
from app import app
@app.route('/')
def index():
return render_template('index.html', active='index')
@app.route('/contact/')
def contact():
return render_template('contact.html', active='contact')
@app.context_processor
def utility_processor():
def page_title(title=None):
return "{} | {}".format(title, app.config['SITE_TITLE']) if title \
else app.config['SITE_TITLE']
def post_source(path):
return '{}{}{}'.format(app.config['POST_SOURCE_ROOT'],
path,
app.config['FLATPAGES_EXTENSION'])
return dict(page_title=page_title, post_source=post_source)
@app.template_filter('date')
def date_filter(date):
return date.strftime('%B %-d, %Y')
|
import os
from flask import render_template
from flask import send_from_directory
from app import app
@app.route('/')
def index():
return render_template('index.html', active='index')
@app.route('/contact/')
def contact():
return render_template('contact.html', active='contact')
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'favicon.ico',
mimetype='image/vnd.microsoft.icon')
@app.context_processor
def utility_processor():
def page_title(title=None):
return "{} | {}".format(title, app.config['SITE_TITLE']) if title \
else app.config['SITE_TITLE']
def post_source(path):
return '{}{}{}'.format(app.config['POST_SOURCE_ROOT'],
path,
app.config['FLATPAGES_EXTENSION'])
return dict(page_title=page_title, post_source=post_source)
@app.template_filter('date')
def date_filter(date):
return date.strftime('%B %-d, %Y')
|
Make the favicon available at /favicon.ico
|
Make the favicon available at /favicon.ico
|
Python
|
mit
|
matachi/MaTachi.github.io,matachi/MaTachi.github.io
|
---
+++
@@ -1,5 +1,6 @@
+import os
from flask import render_template
-
+from flask import send_from_directory
from app import app
@app.route('/')
@@ -9,6 +10,12 @@
@app.route('/contact/')
def contact():
return render_template('contact.html', active='contact')
+
+@app.route('/favicon.ico')
+def favicon():
+ return send_from_directory(os.path.join(app.root_path, 'static'),
+ 'favicon.ico',
+ mimetype='image/vnd.microsoft.icon')
@app.context_processor
def utility_processor():
|
0cda8950a661a0d994d7b5824af949ec1e40f584
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name="zutil",
version='0.1.4',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'mpi4py',
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
)
|
from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
|
Make mpi an optional dependency
|
Make mpi an optional dependency
|
Python
|
mit
|
zCFD/zutil
|
---
+++
@@ -1,14 +1,13 @@
from distutils.core import setup
setup(name="zutil",
- version='0.1.4',
+ version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
- 'mpi4py',
'ipython<6.0',
'Fabric',
'ipywidgets',
@@ -16,5 +15,8 @@
'numpy',
'pandas',
'PyYAML'
- ],
+ ],
+ extras_require={
+ "mpi": ["mpi4py"]
+ }
)
|
334ccd245997373c4ddc21c03657339c4ca20192
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
settings = dict()
# Publish
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
settings.update(
name='whenpy',
version='0.1.0',
description='Friendly Dates and Times',
long_description=open('README.rst').read(),
author='Andy Dirnberger',
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
setup(**settings)
|
Add LICENSE to package data
|
Add LICENSE to package data
The LICENSE file isn't included with the version found on PyPI. Including it in the `package_data` argument passed to `setup` should fix this.
|
Python
|
bsd-3-clause
|
dirn/When.py
|
---
+++
@@ -25,6 +25,8 @@
author_email='dirn@dirnonline.com',
url='https://github.com/dirn/when.py',
packages=['when'],
+ package_data={'': ['LICENSE']},
+ include_package_data=True,
install_requires=['pytz'],
license=open('LICENSE').read(),
classifiers=(
|
55851fc7fadd4b6a0241e63e6b708f9b5d4e253d
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="django-service-rating-store",
version="0.3.0",
url='https://github.com/praekelt/django-service-rating-store',
license='BSD',
description=(
"Django app that allows storage and visualisation of Service Rating data posted via REST API"),
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='devops@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django',
'django-tastypie',
'South',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
from setuptools import setup, find_packages
setup(
name="django-service-rating-store",
version="0.3.0a",
url='https://github.com/praekelt/django-service-rating-store',
license='BSD',
description=(
"Django app that allows storage and visualisation of Service Rating data posted via REST API"),
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='devops@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django',
'django-tastypie',
'South',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
Bump to 0.3.0a post release
|
Bump to 0.3.0a post release
|
Python
|
bsd-3-clause
|
praekelt/django-service-rating-store
|
---
+++
@@ -2,7 +2,7 @@
setup(
name="django-service-rating-store",
- version="0.3.0",
+ version="0.3.0a",
url='https://github.com/praekelt/django-service-rating-store',
license='BSD',
description=(
|
845ed2a406baf333cd289f789aed0089b3ec3be9
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Update the PyPI version to 7.0.1.
|
Update the PyPI version to 7.0.1.
|
Python
|
mit
|
Doist/todoist-python
|
---
+++
@@ -10,7 +10,7 @@
setup(
name='todoist-python',
- version='7.0',
+ version='7.0.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
|
e00f59a20c3efa6a8db307f482d54a99f5b1f643
|
setup.py
|
setup.py
|
import os
import sys
from setuptools import setup
# Utility function to read the README file.
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# The packages we depend on
dependencies = [
"Flask==0.9",
"MySQL-python==1.2.4c1"
]
# If old Python, then we need simplejson
if sys.version_info < (2,6):
dependencies += ["simplejson>=2.6.2"]
setup(
name = "pegasus-metrics",
version = "0.1",
author = "Gideon Juve",
author_email = "gideon@isi.edu",
description = "Anonymous usage metrics collection and reporting for Pegasus",
long_description = read("README.md"),
license = "Apache2",
url = "https://github.com/pegasus-isi/pegasus-metrics",
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
packages = ["pegasus","pegasus.metrics", "pegasus.metrics"],
include_package_data = True,
zip_safe = False,
scripts = ["bin/pegasus-metrics-server"],
install_requires = dependencies
)
|
import os
import sys
from setuptools import setup
# Utility function to read the README file.
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# The packages we depend on
dependencies = [
"Flask==0.9",
"MySQL-python==1.2.4c1"
]
# If old Python, then we need simplejson
if sys.version_info < (2,6):
dependencies += ["simplejson>=2.6.2"]
setup(
name = "pegasus-metrics",
version = "0.1",
author = "Gideon Juve",
author_email = "gideon@isi.edu",
description = "Anonymous usage metrics collection and reporting for Pegasus",
long_description = read("README.md"),
license = "Apache2",
url = "https://github.com/pegasus-isi/pegasus-metrics",
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
packages = ["pegasus","pegasus.metrics"],
package_data = {"pegasus.metrics" : ["templates/*", "static/*"] },
include_package_data = True,
zip_safe = False,
scripts = ["bin/pegasus-metrics-server"],
install_requires = dependencies
)
|
Include templates and static data in packaging
|
Include templates and static data in packaging
|
Python
|
apache-2.0
|
pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics
|
---
+++
@@ -30,7 +30,8 @@
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
- packages = ["pegasus","pegasus.metrics", "pegasus.metrics"],
+ packages = ["pegasus","pegasus.metrics"],
+ package_data = {"pegasus.metrics" : ["templates/*", "static/*"] },
include_package_data = True,
zip_safe = False,
scripts = ["bin/pegasus-metrics-server"],
|
c3ad61a642e0e1149ae7ea1e8bde2efa6bbc9263
|
setup.py
|
setup.py
|
from distutils.core import setup
from setuptools import find_packages
with open('README.md') as fp:
long_description = fp.read()
setup(
name='sendwithus',
version='1.6.6',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=long_description,
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0",
"six >= 1.9.0"
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
from distutils.core import setup
from setuptools import find_packages
with open('README.md') as fp:
long_description = fp.read()
setup(
name='sendwithus',
version='1.6.6',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=long_description,
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0",
"six >= 1.9.0"
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
Update the classifier for all the tested versions of python
|
Update the classifier for all the tested versions of python
|
Python
|
apache-2.0
|
mefyl/sendwithus_python,sendwithus/sendwithus_python
|
---
+++
@@ -23,6 +23,9 @@
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
|
9f50db5179886770178a07a1167b40f3d17ccbd9
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development",
"Topic :: Software Development :: Version Control",
"Topic :: Text Processing :: Filters",
]
setup(
name='diff-highlight',
version='0.1.0',
description='pretty diff highlighter; emphasis changed words in diff',
long_description=open("README.rst").read(),
classifiers=classifiers,
keywords=['mercurial', 'git', 'diff', 'highlight'],
author='Takeshi Komiya',
author_email='i.tkomiya at gmail.com',
url='http://blockdiag.com/',
download_url='http://pypi.python.org/pypi/diff-highlight',
license='Apache License 2.0',
py_modules=['diff_highlight'],
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
entry_points="""
[console_scripts]
diff-highlight = highlights.command:highlight_main
"""
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.4",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development",
"Topic :: Software Development :: Version Control",
"Topic :: Text Processing :: Filters",
]
setup(
name='diff-highlight',
version='0.1.0',
description='pretty diff highlighter; emphasis changed words in diff',
long_description=open("README.rst").read(),
classifiers=classifiers,
keywords=['mercurial', 'git', 'diff', 'highlight'],
author='Takeshi Komiya',
author_email='i.tkomiya at gmail.com',
url='http://blockdiag.com/',
download_url='http://pypi.python.org/pypi/diff-highlight',
license='Apache License 2.0',
py_modules=['diff_highlight'],
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
entry_points="""
[console_scripts]
diff-highlight = highlights.command:highlight_main
"""
)
|
Add py24 and py25 to classifiers
|
Add py24 and py25 to classifiers
|
Python
|
apache-2.0
|
tk0miya/diff-highlight
|
---
+++
@@ -6,6 +6,8 @@
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
+ "Programming Language :: Python :: 2.4",
+ "Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development",
|
20c0466c951864da74b250c0eeb907ad1dc209b9
|
setup.py
|
setup.py
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
Increment minor version once again
|
Increment minor version once again
|
Python
|
bsd-3-clause
|
consbio/parserutils
|
---
+++
@@ -28,7 +28,7 @@
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
- version='1.2.1',
+ version='1.2.2',
packages=[
'parserutils', 'parserutils.tests'
],
|
ea9847a31eb4441415f1c11ddf57056f206fc375
|
reverse-engineering/reveng.py
|
reverse-engineering/reveng.py
|
from os.path import dirname
from puresnmp.pdu import PDU
from puresnmp.test import readbytes_multiple
from puresnmp.x690.types import pop_tlv
HERE = dirname(__file__)
for row in readbytes_multiple('authpriv.hex', HERE):
print(row)
pdu, _ = pop_tlv(row)
print(pdu.pretty())
|
from os.path import dirname
HERE = dirname(__file__)
from puresnmp.pdu import PDU
from puresnmp.test import readbytes_multiple
from puresnmp.x690.types import pop_tlv
for row in readbytes_multiple("authpriv.hex", HERE):
print(row)
pdu, _ = pop_tlv(row)
print(pdu.pretty())
|
Add some files for reverse-engineering
|
Add some files for reverse-engineering
|
Python
|
mit
|
exhuma/puresnmp,exhuma/puresnmp
|
---
+++
@@ -1,12 +1,12 @@
from os.path import dirname
+
+HERE = dirname(__file__)
from puresnmp.pdu import PDU
from puresnmp.test import readbytes_multiple
from puresnmp.x690.types import pop_tlv
-HERE = dirname(__file__)
-
-for row in readbytes_multiple('authpriv.hex', HERE):
+for row in readbytes_multiple("authpriv.hex", HERE):
print(row)
pdu, _ = pop_tlv(row)
print(pdu.pretty())
|
e893a860f4a8ad9682f400507948ee20fce1c328
|
healthcheck/contrib/django/status_endpoint/views.py
|
healthcheck/contrib/django/status_endpoint/views.py
|
import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse, HttpResponseServerError
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(
settings, 'STATUS_CHECK_FILES')
if files_to_check:
checks.append(
FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if not ok:
return HttpResponseServerError((json.dumps(details)))
return HttpResponse(json.dumps(details))
|
import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES')
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
|
Fix content_type for JSON responses
|
Fix content_type for JSON responses
|
Python
|
mit
|
yola/healthcheck
|
---
+++
@@ -2,10 +2,21 @@
from django.conf import settings
from django.views.decorators.http import require_http_methods
-from django.http import HttpResponse, HttpResponseServerError
+from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
+
+
+class JsonResponse(HttpResponse):
+ def __init__(self, data, **kwargs):
+ kwargs.setdefault('content_type', 'application/json')
+ data = json.dumps(data)
+ super(JsonResponse, self).__init__(content=data, **kwargs)
+
+
+class JsonResponseServerError(JsonResponse):
+ status_code = 500
@require_http_methods(['GET'])
@@ -15,16 +26,14 @@
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
- files_to_check = getattr(
- settings, 'STATUS_CHECK_FILES')
+ files_to_check = getattr(settings, 'STATUS_CHECK_FILES')
if files_to_check:
- checks.append(
- FilesDontExistHealthCheck(
- files_to_check, check_id="quiesce file doesn't exist"))
+ checks.append(FilesDontExistHealthCheck(
+ files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if not ok:
- return HttpResponseServerError((json.dumps(details)))
+ return JsonResponseServerError(json.dumps(details))
- return HttpResponse(json.dumps(details))
+ return JsonResponse(details)
|
14a59d3b1d440d20c1df9b6e43c45657d8a60774
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
from setuptools.command.test import test
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
import os
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.rst'))
long_description = f.read().strip()
f.close()
setup(
name='django-common-helpers',
version='0.1',
author='Sumit Chachra',
author_email='chachra@tivix.com',
url='http://github.com/tivix/django-common',
description = 'Common things every Django app needs!',
packages=find_packages(),
long_description=long_description,
keywords = 'django',
zip_safe=False,
install_requires=[
'Django>=1.2.3',
'South>=0.7.2'
],
# test_suite = 'django_common.tests',
include_package_data=True,
# cmdclass={},
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
from setuptools.command.test import test
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
import os
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.rst'))
long_description = f.read().strip()
f.close()
setup(
name='django-common-tivix',
version='0.1.2',
author='Matthew Farver',
author_email='mfarver@tivix.com',
url='http://github.com/tivix/django-common',
description = 'Common things every Django app needs!',
packages=find_packages(),
long_description=long_description,
keywords = 'django',
zip_safe=False,
install_requires=[
'Django>=1.2.3',
'South>=0.7.2'
],
# test_suite = 'django_common.tests',
include_package_data=True,
# cmdclass={},
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
Change it to upload to pypi
|
Change it to upload to pypi
|
Python
|
mit
|
Tivix/django-common,Tivix/django-common,WikiRealtyInc/django-common,WikiRealtyInc/django-common,Tivix/django-common,WikiRealtyInc/django-common
|
---
+++
@@ -18,10 +18,10 @@
f.close()
setup(
- name='django-common-helpers',
- version='0.1',
- author='Sumit Chachra',
- author_email='chachra@tivix.com',
+ name='django-common-tivix',
+ version='0.1.2',
+ author='Matthew Farver',
+ author_email='mfarver@tivix.com',
url='http://github.com/tivix/django-common',
description = 'Common things every Django app needs!',
packages=find_packages(),
|
cf49c86c6e0966061ab7756a9922c12430ade35f
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
setup(
version='0.4.0',
name='incuna-groups',
packages=find_packages(),
include_package_data=True,
install_requires=[
'django_crispy_forms==1.4.0',
'django-polymorphic==0.6.1',
],
description='Generic group/forum framework.',
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/incuna-groups',
)
|
from setuptools import find_packages, setup
setup(
version='0.4.0',
name='incuna-groups',
packages=find_packages(),
include_package_data=True,
install_requires=[
'django_crispy_forms>=1.4.0,<2',
'django-polymorphic>=0.6.1,<1',
],
description='Generic group/forum framework.',
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/incuna-groups',
)
|
Allow wider range of dependencies
|
Allow wider range of dependencies
|
Python
|
bsd-2-clause
|
incuna/incuna-groups,incuna/incuna-groups
|
---
+++
@@ -7,8 +7,8 @@
packages=find_packages(),
include_package_data=True,
install_requires=[
- 'django_crispy_forms==1.4.0',
- 'django-polymorphic==0.6.1',
+ 'django_crispy_forms>=1.4.0,<2',
+ 'django-polymorphic>=0.6.1,<1',
],
description='Generic group/forum framework.',
author='Incuna Ltd',
|
710dbd42142f4f3ea0f02b1924f518b3cb38b79f
|
setup.py
|
setup.py
|
"""Setuptools file for a MultiMarkdown Python wrapper."""
from codecs import open
from os import path
from distutils.core import setup
from setuptools import find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='scriptorium',
version='2.0.0',
description='Multimarkdown and LaTeX framework for academic papers.',
long_description=long_description,
license='MIT',
author='Jason Ziglar',
author_email='jasedit@gmail.com',
url="https://github.com/jasedit/scriptorium",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Markup',
'Topic :: Text Processing :: Filters',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3'
],
packages=find_packages(),
entry_points = {
'console_scripts': ['scriptorium = scriptorium:main'],
},
package_data={'scriptorium': ['data/gitignore']}
)
|
"""Setuptools file for a MultiMarkdown Python wrapper."""
from codecs import open
from os import path
from distutils.core import setup
from setuptools import find_packages
import pypandoc
here = path.abspath(path.dirname(__file__))
long_description = pypandoc.convert_file('README.md', 'rst')
setup(
name='scriptorium',
version='2.0.1',
description='Multimarkdown and LaTeX framework for academic papers.',
long_description=long_description,
license='MIT',
author='Jason Ziglar',
author_email='jasedit@gmail.com',
url="https://github.com/jasedit/scriptorium",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Markup',
'Topic :: Text Processing :: Filters',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3'
],
packages=find_packages(),
entry_points = {
'console_scripts': ['scriptorium = scriptorium:main'],
},
package_data={'scriptorium': ['data/gitignore']}
)
|
Convert README to Restructured Text for distribution.
|
Convert README to Restructured Text for distribution.
|
Python
|
mit
|
jasedit/scriptorium,jasedit/papers_base
|
---
+++
@@ -3,15 +3,15 @@
from os import path
from distutils.core import setup
from setuptools import find_packages
+import pypandoc
here = path.abspath(path.dirname(__file__))
-with open(path.join(here, 'README.md'), encoding='utf-8') as f:
- long_description = f.read()
+long_description = pypandoc.convert_file('README.md', 'rst')
setup(
name='scriptorium',
- version='2.0.0',
+ version='2.0.1',
description='Multimarkdown and LaTeX framework for academic papers.',
long_description=long_description,
license='MIT',
@@ -19,7 +19,7 @@
author_email='jasedit@gmail.com',
url="https://github.com/jasedit/scriptorium",
classifiers=[
- 'Development Status :: 3 - Alpha',
+ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Markup',
'Topic :: Text Processing :: Filters',
|
9eb09bcece4a92f06c55078d2cd0e19e872c2794
|
setup.py
|
setup.py
|
import astral
import os
from setuptools import find_packages, setup
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
long_description = open(os.path.join(ROOT_PATH, 'README.rst')).read()
setup(name='astral',
version=astral.__version__,
description='Astral Streaming P2P Client',
long_description=long_description,
author='Astral Project Group',
author_email='astral@bueda.com',
url='http://github.com/peplin/astral',
test_suite='nose.collector',
setup_requires=['nose>=0.11',],
test_requires=['unittest2>=0.5.1',
'mockito==0.5.1',
'python-faker==0.2.3',
'factory-boy==1.0.0',],
install_requires=[
'tornado>=1.2.1',
'importlib>=1.0.2',
'sqlalchemy>=0.6.6',
'Elixir>=0.7.1',
'restkit>=3.2.0',
],
packages=find_packages(),
entry_points={
'console_scripts': [
'astralnode = astral.bin.astralnode:main',],
},
)
|
import astral
import os
from setuptools import find_packages, setup
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
long_description = open(os.path.join(ROOT_PATH, 'README.rst')).read()
setup(name='astral',
version=astral.__version__,
description='Astral Streaming P2P Client',
long_description=long_description,
author='Astral Project Group',
author_email='astral@bueda.com',
url='http://github.com/peplin/astral',
test_suite='nose.collector',
setup_requires=['nose>=0.11',],
test_requires=['unittest2>=0.5.1',
'mockito==0.5.1',
'python-faker==0.2.3',
'factory-boy==1.0.0',],
install_requires=[
'tornado>=1.2.1',
'importlib>=1.0.2',
'sqlalchemy>=0.6.6',
'Elixir>=0.7.1',
'restkit>=3.2.0',
],
packages=find_packages(),
entry_points={
'console_scripts': [
'astralnode = astral.bin.astralnode:main',
'astralctl = astral.bin.astralctl:main',
],
},
)
|
Add astralctl script to console scripts section of package config.
|
Add astralctl script to console scripts section of package config.
|
Python
|
mit
|
peplin/astral
|
---
+++
@@ -28,6 +28,8 @@
packages=find_packages(),
entry_points={
'console_scripts': [
- 'astralnode = astral.bin.astralnode:main',],
+ 'astralnode = astral.bin.astralnode:main',
+ 'astralctl = astral.bin.astralctl:main',
+ ],
},
)
|
3ed73321bb70817faa87d31ec8b588857b204634
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='pypif',
version='2.0.1',
url='http://github.com/CitrineInformatics/pypif',
description='Python tools for working with the Physical Information File (PIF)',
author='Kyle Michel',
author_email='kyle@citrine.io',
packages=find_packages(),
install_requires=[
'six>=1.10.0,<2'
])
|
from setuptools import setup, find_packages
setup(name='pypif',
version='2.1.0',
url='http://github.com/CitrineInformatics/pypif',
description='Python tools for working with the Physical Information File (PIF)',
author='Kyle Michel',
author_email='kyle@citrine.io',
packages=find_packages(),
install_requires=[
'six>=1.10.0,<2'
])
|
Add url field to Person object
|
Add url field to Person object
|
Python
|
apache-2.0
|
CitrineInformatics/pypif
|
---
+++
@@ -1,7 +1,7 @@
from setuptools import setup, find_packages
setup(name='pypif',
- version='2.0.1',
+ version='2.1.0',
url='http://github.com/CitrineInformatics/pypif',
description='Python tools for working with the Physical Information File (PIF)',
author='Kyle Michel',
|
b6b8c8e12b18515591c0b053cac9e515314b819e
|
setup.py
|
setup.py
|
"""
setup script
"""
from setuptools import setup, find_packages
_VERSION = '0.7'
setup(
name='jut-tools',
version=_VERSION,
author='Rodney Gomes',
author_email='rodney@jut.io',
url='https://github.com/jut-io/jut-python-tools',
download_url='https://github.com/jut-io/jut-python-tools/tarball/%s' % _VERSION,
install_requires=[
'requests==2.7.0',
'websocket-client==0.32.0',
'memoized==0.2',
'tabulate==0.7.5'
],
test_suite='tests',
keywords=[''],
packages=find_packages(exclude=['tests']),
license='MIT License',
description='jut command line tools',
# pypi doesn't support markdown so we can't push the README.md as is
long_description='https://github.com/jut-io/jut-python-tools/blob/master/README.md',
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jut = jut.cli:main'
]
},
)
|
"""
setup script
"""
from setuptools import setup, find_packages
_VERSION = '0.7'
setup(
name='jut-tools',
version=_VERSION,
author='Rodney Gomes',
author_email='rodney@jut.io',
url='https://github.com/jut-io/jut-python-tools',
download_url='https://github.com/jut-io/jut-python-tools/tarball/%s' % _VERSION,
install_requires=[
'requests==2.7.0',
'websocket-client==0.32.0',
'memoized==0.2',
'tabulate==0.7.5'
],
test_suite='tests',
tests_install=[
'sh==1.11'
],
keywords=[''],
packages=find_packages(exclude=['tests']),
license='MIT License',
description='jut command line tools',
# pypi doesn't support markdown so we can't push the README.md as is
long_description='https://github.com/jut-io/jut-python-tools/blob/master/README.md',
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jut = jut.cli:main'
]
},
)
|
Revert "cleaning up a few unnecessary modules"
|
Revert "cleaning up a few unnecessary modules"
This reverts commit 6b6911ca54a8bb61a1715c19be71729a55497278.
|
Python
|
mit
|
jut-io/jut-python-tools
|
---
+++
@@ -23,6 +23,10 @@
],
test_suite='tests',
+ tests_install=[
+ 'sh==1.11'
+ ],
+
keywords=[''],
packages=find_packages(exclude=['tests']),
|
6dd3946a0ecf63d913efc71150d753895710001e
|
setup.py
|
setup.py
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
""" Install the `arte_plus7` script """
from setuptools import setup
NAME = 'arte_plus7'
def get_version(module):
""" Extract package version without importing file
Importing cause issues with coverage,
(modules can be removed from sys.modules to prevent this)
Inspired from pep8 setup.py
"""
with open('%s.py' % module) as module_fd:
for line in module_fd:
if line.startswith('__version__'):
return eval(line.split('=')[-1]) # pylint:disable=eval-used
setup(
name=NAME,
version=get_version(NAME),
description='CLI script to get videos from Arte plus 7 using their URL',
author='cladmi',
download_url='https://github.com/cladmi/arte_plus7',
py_modules=NAME,
entry_points={
'console_scripts': ['{name} = {name}:main'.format(name=NAME)],
},
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Intended Audience :: End Users/Desktop',
'Topic :: utilities',
],
install_requires=['argparse', 'beautifulsoup4'],
)
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
""" Install the `arte_plus7` script """
from setuptools import setup
NAME = 'arte_plus7'
def get_version(module):
""" Extract package version without importing file
Importing cause issues with coverage,
(modules can be removed from sys.modules to prevent this)
Inspired from pep8 setup.py
"""
with open('%s.py' % module) as module_fd:
for line in module_fd:
if line.startswith('__version__'):
return eval(line.split('=')[-1]) # pylint:disable=eval-used
setup(
name=NAME,
version=get_version(NAME),
description='CLI script to get videos from Arte plus 7 using their URL',
author='cladmi',
download_url='https://github.com/cladmi/arte_plus7',
py_modules=[NAME],
entry_points={
'console_scripts': ['{name} = {name}:main'.format(name=NAME)],
},
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Intended Audience :: End Users/Desktop',
'Topic :: utilities',
],
install_requires=['argparse', 'beautifulsoup4'],
)
|
Correct py_modules as a list
|
Correct py_modules as a list
|
Python
|
mit
|
cladmi/arte_plus7
|
---
+++
@@ -27,7 +27,7 @@
description='CLI script to get videos from Arte plus 7 using their URL',
author='cladmi',
download_url='https://github.com/cladmi/arte_plus7',
- py_modules=NAME,
+ py_modules=[NAME],
entry_points={
'console_scripts': ['{name} = {name}:main'.format(name=NAME)],
},
|
be2cd54386c0fb9c407ac5dc7da467547b0b426e
|
aldryn_apphooks_config/utils.py
|
aldryn_apphooks_config/utils.py
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve, Resolver404
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
if app and app.app_config:
try:
config = None
namespace = resolve(request.path).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
except Resolver404:
pass
return '', None
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
Add checks to get_app_instance to avoid Resolver404 even if namespace does not exists
|
Add checks to get_app_instance to avoid Resolver404 even if namespace does not exists
|
Python
|
bsd-3-clause
|
aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config
|
---
+++
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
-from django.core.urlresolvers import resolve
+from django.core.urlresolvers import resolve, Resolver404
def get_app_instance(request):
@@ -15,11 +15,16 @@
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
- config = None
- namespace = resolve(request.path_info).namespace
if app and app.app_config:
- config = app.get_config(namespace)
- return namespace, config
+ try:
+ config = None
+ namespace = resolve(request.path).namespace
+ if app and app.app_config:
+ config = app.get_config(namespace)
+ return namespace, config
+ except Resolver404:
+ pass
+ return '', None
def setup_config(form_class, config_model):
|
300415c30dab2f8d2622c8b8de03f433bf6d5960
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from jamo import __version__
import sys
if sys.version_info <= (3, 0):
print("ERROR: jamo requires Python 3.0 or later "
"(bleeding edge preferred)", file=sys.stderr)
sys.exit(1)
with open('README.rst') as f:
long_description = f.read()
setup(
name="jamo",
version=__version__,
description="A Hangul syllable and jamo analyzer.",
long_description=long_description,
url="https://github.com/jdong820/python-jamo",
author="Joshua Dong",
author_email="jdong42@gmail.com",
license="http://www.apache.org/licenses/LICENSE-2.0",
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
keywords="Korean Hangul jamo syllable nlp",
packages=find_packages(),
package_dir={'jamo': 'jamo'},
package_data={'jamo': ['data/*.json']},
)
|
from setuptools import setup, find_packages
from jamo import __version__
import sys
if sys.version_info <= (3, 0):
print("ERROR: jamo requires Python 3.0 or later "
"(bleeding edge preferred)", file=sys.stderr)
sys.exit(1)
with open('README.rst', encoding='utf8') as f:
long_description = f.read()
setup(
name="jamo",
version=__version__,
description="A Hangul syllable and jamo analyzer.",
long_description=long_description,
url="https://github.com/jdong820/python-jamo",
author="Joshua Dong",
author_email="jdong42@gmail.com",
license="http://www.apache.org/licenses/LICENSE-2.0",
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
keywords="Korean Hangul jamo syllable nlp",
packages=find_packages(),
package_dir={'jamo': 'jamo'},
package_data={'jamo': ['data/*.json']},
)
|
Fix for encoding bug during installation on Windows
|
Fix for encoding bug during installation on Windows
|
Python
|
apache-2.0
|
JDongian/python-jamo
|
---
+++
@@ -7,7 +7,7 @@
"(bleeding edge preferred)", file=sys.stderr)
sys.exit(1)
-with open('README.rst') as f:
+with open('README.rst', encoding='utf8') as f:
long_description = f.read()
setup(
|
7941c60ba5a52ead654e5816ee39e48b9e927a21
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from distutils.core import setup
setup(
name='LoadStone',
version='0.1',
description='Interface for FFXIV Lodestone',
author='Sami Elahmadie',
author_email='s.elahmadie@gmail.com',
url='https://github.com/Demotivated/loadstone/',
packages=['api'],
install_requires=[
'flask==0.10.1',
'flask_sqlalchemy==2.0',
'lxml==3.4.4',
'psycopg2==2.6.1',
'pytest==2.8.2',
'pytest-flask==0.10.0',
'requests==2.8.1',
]
)
|
#!/usr/bin/env python3
from distutils.core import setup
setup(
name='LoadStone',
version='0.1',
description='Interface for FFXIV Lodestone',
author='Sami Elahmadie',
author_email='s.elahmadie@gmail.com',
url='https://github.com/Demotivated/loadstone/',
packages=['api'],
install_requires=[
'flask==0.10.1',
'flask_sqlalchemy==2.0',
'lxml==3.4.4',
'psycopg2==2.6.1',
'pytest==2.8.2',
'pytest-flask==0.10.0',
'requests==2.8.1',
'sphinx==1.3.1',
'sphinx-rtd-theme==0.1.9'
]
)
|
Add sphinx & theme to requirements
|
Add sphinx & theme to requirements
|
Python
|
mit
|
Demotivated/loadstone
|
---
+++
@@ -18,5 +18,7 @@
'pytest==2.8.2',
'pytest-flask==0.10.0',
'requests==2.8.1',
+ 'sphinx==1.3.1',
+ 'sphinx-rtd-theme==0.1.9'
]
)
|
433c041e3016fce0f1e3edbb9dae980f69071f3b
|
setup.py
|
setup.py
|
import os
from setuptools import setup
setup(
name = "pyscribe",
version = "0.1.1",
author = "Alexander Wang",
author_email = "alexanderw@berkeley.edu",
description = ("PyScribe makes print debugging easier and more efficient"),
license = "MIT",
keywords = "python pyscribe debug print",
url = "https://github.com/alixander/pyscribe",
download_url = "https://github.com/alixander/pyscribe/tarbell/0.1.1",
entry_points={
'console_scripts': [
'pyscribe = pyscribe.pyscribe:main',
],
},
packages=['pyscribe'],
classifiers=[
"Development Status :: 3 - Alpha"
],
)
|
import os
from setuptools import setup
setup(
name = "pyscribe",
version = "0.1.1",
author = "Alexander Wang",
author_email = "alexanderw@berkeley.edu",
description = ("PyScribe makes print debugging easier and more efficient"),
license = "MIT",
keywords = "python pyscribe debug print",
url = "https://github.com/alixander/pyscribe",
download_url = "https://github.com/alixander/pyscribe/tarbell/0.1.2",
entry_points={
'console_scripts': [
'pyscribe = pyscribe.pyscribe:main',
],
},
packages=['pyscribe'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
)
|
Add classifiers since python3 now supported
|
Add classifiers since python3 now supported
|
Python
|
mit
|
alixander/PyScribe
|
---
+++
@@ -9,7 +9,7 @@
license = "MIT",
keywords = "python pyscribe debug print",
url = "https://github.com/alixander/pyscribe",
- download_url = "https://github.com/alixander/pyscribe/tarbell/0.1.1",
+ download_url = "https://github.com/alixander/pyscribe/tarbell/0.1.2",
entry_points={
'console_scripts': [
'pyscribe = pyscribe.pyscribe:main',
@@ -17,6 +17,10 @@
},
packages=['pyscribe'],
classifiers=[
- "Development Status :: 3 - Alpha"
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.4',
],
)
|
9cb532115f44b25101efa7d448328e35ca51c37c
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
setup(
name='publish-aws-lambda',
version='0.3',
description='Publish a Python module as a set of AWS lambda functions',
url='https://github.com/ophirh/publish-aws-lambda',
author='Ophir',
author_email='opensource@itculate.io',
license='MIT',
keywords=['aws', 'lambda', 'publish'],
packages=find_packages(),
)
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
setup(
name='publish-aws-lambda',
version='0.3.1',
description='Publish a Python module as a set of AWS lambda functions',
url='https://github.com/ophirh/publish-aws-lambda',
author='Ophir',
author_email='opensource@itculate.io',
license='MIT',
keywords=['aws', 'lambda', 'publish'],
packages=find_packages(),
)
|
Fix issue with deleting boto3 from ZIP file
|
Fix issue with deleting boto3 from ZIP file
|
Python
|
mit
|
ophirh/publish-aws-lambda
|
---
+++
@@ -5,7 +5,7 @@
setup(
name='publish-aws-lambda',
- version='0.3',
+ version='0.3.1',
description='Publish a Python module as a set of AWS lambda functions',
url='https://github.com/ophirh/publish-aws-lambda',
author='Ophir',
|
f80ef1738608ececd6fb2b6a034ca9263f06ae17
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-ical',
version='1.4',
description="iCal feeds for Django based on Django's syndication feed "
"framework.",
long_description=(open('README.rst').read() + '\n' +
open('CHANGES.rst').read()),
author='Ian Lewis',
author_email='IanMLewis@gmail.com',
license='MIT License',
url='https://github.com/Pinkerton/django-ical',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'Django>=1.3.4',
'icalendar>=3.1',
],
packages=find_packages(),
test_suite='tests.main',
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-ical',
version='1.4',
description="iCal feeds for Django based on Django's syndication feed "
"framework.",
long_description=(open('README.rst').read() + '\n' +
open('CHANGES.rst').read()),
author='Ian Lewis',
author_email='IanMLewis@gmail.com',
license='MIT License',
url='https://github.com/Pinkerton/django-ical',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Framework :: Django :: 1.3',
'Framework :: Django :: 1.4',
'Framework :: Django :: 1.5',
'Framework :: Django :: 1.6',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'Django>=1.3.4',
'icalendar>=3.1',
],
packages=find_packages(),
test_suite='tests.main',
)
|
Add Python & Django classifiers
|
Add Python & Django classifiers
|
Python
|
mit
|
Pinkerton/django-ical
|
---
+++
@@ -17,9 +17,22 @@
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
+ 'Framework :: Django :: 1.3',
+ 'Framework :: Django :: 1.4',
+ 'Framework :: Django :: 1.5',
+ 'Framework :: Django :: 1.6',
+ 'Framework :: Django :: 1.7',
+ 'Framework :: Django :: 1.8',
+ 'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
- 'License :: OSI Approved :: BSD License',
+ 'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
|
535b9f95c89974714eacece9bf5109fd1ec5116f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
with open('requirements.txt', 'r') as f:
requirements = f.readlines()
setup(
name='resultsdb-updater',
version='3.0.0',
description=('A micro-service that listens for messages on the message '
'bus and updates ResultsDB'),
license='GPLv2+',
author='Matt Prahl',
author_email='mprahl@redhat.com',
url='https://github.com/release-engineering/resultsdb-updater',
install_requires=requirements,
packages=find_packages(),
include_data=True,
entry_points="""
[moksha.consumer]
ciconsumer = resultsdbupdater.consumer:CIConsumer
""",
)
|
from setuptools import setup, find_packages
with open('requirements.txt', 'r') as f:
requirements = f.readlines()
setup(
name='resultsdb-updater',
version='3.0.0',
description=('A micro-service that listens for messages on the message '
'bus and updates ResultsDB'),
license='GPLv2+',
author='Matt Prahl',
author_email='mprahl@redhat.com',
url='https://github.com/release-engineering/resultsdb-updater',
install_requires=requirements,
packages=find_packages(),
entry_points="""
[moksha.consumer]
ciconsumer = resultsdbupdater.consumer:CIConsumer
""",
)
|
Resolve warning "Unknown distribution option: 'include_data'"
|
Resolve warning "Unknown distribution option: 'include_data'"
|
Python
|
lgpl-2.1
|
release-engineering/resultsdb-updater,release-engineering/resultsdb-updater
|
---
+++
@@ -14,7 +14,6 @@
url='https://github.com/release-engineering/resultsdb-updater',
install_requires=requirements,
packages=find_packages(),
- include_data=True,
entry_points="""
[moksha.consumer]
ciconsumer = resultsdbupdater.consumer:CIConsumer
|
a5c0f5c46c64e56e0a4a0791b86b820e8ed0241b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(name='sophiabus230',
version='0.4',
description='Module to get the timetable of the Sophia Antipolis bus line 230',
url='http://github.com/paraita/sophiabus230',
author='Paraita Wohler',
author_email='paraita.wohler@gmail.com',
license='MIT',
packages=['sophiabus230'],
install_requires=[
'beautifulsoup4',
'python-dateutil',
'future'
],
test_suite='nose.collector',
tests_require=[
'mock',
'nose',
'coverage',
'coveralls'
],
zip_safe=False)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(name='sophiabus230',
version='0.5',
description='Module to get the timetable of the Sophia Antipolis bus line 230',
url='http://github.com/paraita/sophiabus230',
author='Paraita Wohler',
author_email='paraita.wohler@gmail.com',
license='MIT',
packages=['sophiabus230'],
install_requires=[
'beautifulsoup4',
'python-dateutil',
'future'
],
test_suite='nose.collector',
tests_require=[
'mock',
'nose',
'coverage',
'coveralls'
],
zip_safe=False)
|
Update package version for Pypi
|
Update package version for Pypi
|
Python
|
mit
|
paraita/sophiabus230,paraita/sophiabus230
|
---
+++
@@ -4,7 +4,7 @@
from setuptools import setup
setup(name='sophiabus230',
- version='0.4',
+ version='0.5',
description='Module to get the timetable of the Sophia Antipolis bus line 230',
url='http://github.com/paraita/sophiabus230',
author='Paraita Wohler',
|
d96b8f237e79c924865b429b5e8a725630537d2b
|
setup.py
|
setup.py
|
from setuptools import setup
with open('README.rst') as README:
long_description = README.read()
long_description = long_description[long_description.index('Description'):]
setup(name='wos',
version='0.1.2',
description='Web of Science client using API v3.',
long_description=long_description,
install_requires=['suds'],
url='http://github.com/enricobacis/wos',
author='Enrico Bacis',
author_email='enrico.bacis@gmail.com',
license='MIT',
packages=['wos'],
scripts=['scripts/wos'],
keywords='wos isi web of science knowledge api client'
)
|
from setuptools import setup
with open('README.rst') as README:
long_description = README.read()
long_description = long_description[long_description.index('Description'):]
setup(name='wos',
version='0.1.2',
description='Web of Science client using API v3.',
long_description=long_description,
extras_require={
':python_version <= "2.7"': ['suds'],
':python_version >= "3.0"': ['suds-jurko'],
},
url='http://github.com/enricobacis/wos',
author='Enrico Bacis',
author_email='enrico.bacis@gmail.com',
license='MIT',
packages=['wos'],
scripts=['scripts/wos'],
keywords='wos isi web of science knowledge api client'
)
|
Use suds-jurko for python 3
|
Use suds-jurko for python 3
|
Python
|
mit
|
enricobacis/wos
|
---
+++
@@ -8,7 +8,10 @@
version='0.1.2',
description='Web of Science client using API v3.',
long_description=long_description,
- install_requires=['suds'],
+ extras_require={
+ ':python_version <= "2.7"': ['suds'],
+ ':python_version >= "3.0"': ['suds-jurko'],
+ },
url='http://github.com/enricobacis/wos',
author='Enrico Bacis',
author_email='enrico.bacis@gmail.com',
|
8ccc50c222163b76e2284438ab3779de13ba5c6d
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from os import path
from setuptools import find_packages, setup
README_rst = path.join(path.abspath(path.dirname(__file__)), 'README.rst')
with open(README_rst, 'r') as f:
long_description = f.read()
setup(
name="pyee",
vcversioner={},
packages=find_packages(),
setup_requires=[
'pytest-runner',
'pytest-asyncio;python_version>"3.4"',
'vcversioner'
],
tests_require=['twisted'],
include_package_data=True,
description="A port of node.js's EventEmitter to python.",
long_description=long_description,
author="Joshua Holbrook",
author_email="josh.holbrook@gmail.com",
url="https://github.com/jfhbrook/pyee",
keywords=[
"events", "emitter", "node.js", "node", "eventemitter",
"event_emitter"
],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Other/Nonlisted Topic"
]
)
|
# -*- coding: utf-8 -*-
from os import path
from setuptools import find_packages, setup
README_rst = path.join(path.abspath(path.dirname(__file__)), 'README.rst')
with open(README_rst, 'r') as f:
long_description = f.read()
setup(
name="pyee",
vcversioner={},
packages=find_packages(),
setup_requires=[
'pytest-runner',
'pytest-asyncio; python_version > "3.4"',
'vcversioner'
],
tests_require=['twisted'],
include_package_data=True,
description="A port of node.js's EventEmitter to python.",
long_description=long_description,
author="Joshua Holbrook",
author_email="josh.holbrook@gmail.com",
url="https://github.com/jfhbrook/pyee",
keywords=[
"events", "emitter", "node.js", "node", "eventemitter",
"event_emitter"
],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Other/Nonlisted Topic"
]
)
|
Add whitespace to version spec?
|
Add whitespace to version spec?
|
Python
|
mit
|
jfhbrook/pyee
|
---
+++
@@ -15,7 +15,7 @@
packages=find_packages(),
setup_requires=[
'pytest-runner',
- 'pytest-asyncio;python_version>"3.4"',
+ 'pytest-asyncio; python_version > "3.4"',
'vcversioner'
],
tests_require=['twisted'],
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.