commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
e4e3f0d7270e93e6123dbf05e1f51993e38d970c
|
tests/cpydiff/types_exception_subclassinit.py
|
tests/cpydiff/types_exception_subclassinit.py
|
"""
categories: Types,Exception
description: Exception.__init__ raises TypeError if overridden and called by subclass
cause: Unknown
workaround: Unknown
"""
class A(Exception):
def __init__(self):
Exception.__init__(self)
a = A()
|
"""
categories: Types,Exception
description: Exception.__init__ method does not exist.
cause: Subclassing native classes is not fully supported in MicroPython.
workaround: Call using ``super()`` instead::
class A(Exception):
def __init__(self):
super().__init__()
"""
class A(Exception):
def __init__(self):
Exception.__init__(self)
a = A()
|
Update subclassing Exception case and give work-around.
|
tests/cpydiff: Update subclassing Exception case and give work-around.
|
Python
|
mit
|
adafruit/micropython,pfalcon/micropython,adafruit/circuitpython,blazewicz/micropython,adafruit/circuitpython,blazewicz/micropython,ryannathans/micropython,ryannathans/micropython,henriknelson/micropython,MrSurly/micropython,trezor/micropython,adafruit/micropython,adafruit/micropython,dmazzella/micropython,pramasoul/micropython,tobbad/micropython,adafruit/circuitpython,selste/micropython,henriknelson/micropython,pozetroninc/micropython,trezor/micropython,kerneltask/micropython,infinnovation/micropython,ryannathans/micropython,infinnovation/micropython,pramasoul/micropython,bvernoux/micropython,dmazzella/micropython,blazewicz/micropython,tobbad/micropython,pfalcon/micropython,pozetroninc/micropython,adafruit/circuitpython,infinnovation/micropython,henriknelson/micropython,pramasoul/micropython,pfalcon/micropython,kerneltask/micropython,tobbad/micropython,adafruit/circuitpython,MrSurly/micropython,tobbad/micropython,bvernoux/micropython,adafruit/micropython,trezor/micropython,henriknelson/micropython,tralamazza/micropython,pfalcon/micropython,pramasoul/micropython,kerneltask/micropython,ryannathans/micropython,pozetroninc/micropython,MrSurly/micropython,infinnovation/micropython,trezor/micropython,dmazzella/micropython,pozetroninc/micropython,MrSurly/micropython,selste/micropython,swegener/micropython,swegener/micropython,tobbad/micropython,swegener/micropython,bvernoux/micropython,blazewicz/micropython,adafruit/circuitpython,swegener/micropython,pozetroninc/micropython,swegener/micropython,selste/micropython,kerneltask/micropython,blazewicz/micropython,dmazzella/micropython,tralamazza/micropython,selste/micropython,bvernoux/micropython,tralamazza/micropython,adafruit/micropython,ryannathans/micropython,MrSurly/micropython,bvernoux/micropython,selste/micropython,infinnovation/micropython,trezor/micropython,pfalcon/micropython,tralamazza/micropython,kerneltask/micropython,pramasoul/micropython,henriknelson/micropython
|
---
+++
@@ -1,8 +1,12 @@
"""
categories: Types,Exception
-description: Exception.__init__ raises TypeError if overridden and called by subclass
-cause: Unknown
-workaround: Unknown
+description: Exception.__init__ method does not exist.
+cause: Subclassing native classes is not fully supported in MicroPython.
+workaround: Call using ``super()`` instead::
+
+class A(Exception):
+ def __init__(self):
+ super().__init__()
"""
class A(Exception):
def __init__(self):
|
1f7979edaa918a52702bea5de6f2bdd7a8e60796
|
encryption.py
|
encryption.py
|
import base64
from Crypto.Cipher import AES
from Crypto import Random
def encrypt(raw, key):
raw = pad(raw)
iv = Random.new().read(AES.block_size)
cipher = AES.new(key, AES.MODE_CBC, iv)
return base64.b64encode(iv + cipher.encrypt(raw))
def decrypt(enc, key):
enc = base64.b64decode(enc)
iv = enc[:AES.block_size]
cipher = AES.new(key, AES.MODE_CBC, iv)
result = unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')
return result
def pad(s):
bs = 32
return s + (bs - len(s) % bs) * chr(bs - len(s) % bs)
def unpad(s):
return s[:-ord(s[len(s)-1:])]
|
import base64
from Crypto.Cipher import AES
from Crypto import Random
def encrypt(raw, key):
raw = pad(raw)
iv = Random.new().read(AES.block_size)
cipher = AES.new(key, AES.MODE_CBC, iv)
return base64.b64encode(iv + cipher.encrypt(raw)).decode('utf-8')
def decrypt(enc, key):
enc = base64.b64decode(enc)
iv = enc[:AES.block_size]
cipher = AES.new(key, AES.MODE_CBC, iv)
result = unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')
return result
def pad(s):
bs = 32
return s + (bs - len(s) % bs) * chr(bs - len(s) % bs)
def unpad(s):
return s[:-ord(s[len(s)-1:])]
|
Add decode(utf-8) to return on encrypt
|
Add decode(utf-8) to return on encrypt
|
Python
|
mit
|
regexpressyourself/passman
|
---
+++
@@ -6,7 +6,7 @@
raw = pad(raw)
iv = Random.new().read(AES.block_size)
cipher = AES.new(key, AES.MODE_CBC, iv)
- return base64.b64encode(iv + cipher.encrypt(raw))
+ return base64.b64encode(iv + cipher.encrypt(raw)).decode('utf-8')
def decrypt(enc, key):
enc = base64.b64decode(enc)
|
68374c16d66cdeea9dbce620dc29d375e3009070
|
bcbio/bam/fasta.py
|
bcbio/bam/fasta.py
|
from Bio import SeqIO
def sequence_length(fasta):
"""
return a dict of the lengths of sequences in a fasta file
"""
file_handle = open(fasta)
in_handle = SeqIO.parse(file_handle, "fasta")
records = {record.id: len(record) for record in in_handle}
file_handle.close()
return records
|
from Bio import SeqIO
def sequence_length(fasta):
"""
return a dict of the lengths of sequences in a fasta file
"""
sequences = SeqIO.parse(fasta, "fasta")
records = {record.id: len(record) for record in sequences}
return records
def sequence_names(fasta):
"""
return a list of the sequence IDs in a FASTA file
"""
sequences = SeqIO.parse(fasta, "fasta")
records = [record.id for record in sequences]
return records
|
Add function to get list of sequence names from a FASTA file.
|
Add function to get list of sequence names from a FASTA file.
Refactor to be simpler.
|
Python
|
mit
|
vladsaveliev/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,chapmanb/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,brainstorm/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,a113n/bcbio-nextgen,brainstorm/bcbio-nextgen,brainstorm/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen
|
---
+++
@@ -4,8 +4,14 @@
"""
return a dict of the lengths of sequences in a fasta file
"""
- file_handle = open(fasta)
- in_handle = SeqIO.parse(file_handle, "fasta")
- records = {record.id: len(record) for record in in_handle}
- file_handle.close()
+ sequences = SeqIO.parse(fasta, "fasta")
+ records = {record.id: len(record) for record in sequences}
return records
+
+def sequence_names(fasta):
+ """
+ return a list of the sequence IDs in a FASTA file
+ """
+ sequences = SeqIO.parse(fasta, "fasta")
+ records = [record.id for record in sequences]
+ return records
|
e6d7ec55bf00960d42b3288ae5e0e501030d5fa9
|
test/library/gyptest-shared-obj-install-path.py
|
test/library/gyptest-shared-obj-install-path.py
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
Add with_statement import for python2.5.
|
Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
|
Python
|
bsd-3-clause
|
witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp
|
---
+++
@@ -8,6 +8,9 @@
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
+
+# Python 2.5 needs this for the with statement.
+from __future__ import with_statement
import os
import TestGyp
|
4e1d0ce04c762d60eedf5bd2ecdd689fb706cbc2
|
anserv/cronjobs/__init__.py
|
anserv/cronjobs/__init__.py
|
registered = {}
registered_lock = {}
parameters = {}
def register(f=None, lock=True, params={}):
"""Decorator to add the function to the cronjob library.
@cronjobs.register
def my_task():
print('I can be run once/machine at a time.')
@cronjobs.register(lock=False)
def my_task():
print('I am concurrent friendly!')
"""
def decorator(f, lock=lock):
registered[f.__name__] = f
parameters[f.__name__] = params
if lock:
registered_lock[f.__name__] = f
return f
if callable(f):
return decorator(f, lock)
return decorator
|
registered = {}
registered_lock = {}
parameters = {}
from decorator import decorator
def register(f=None, lock=True, params={}):
"""Decorator to add the function to the cronjob library.
@cronjobs.register
def my_task():
print('I can be run once/machine at a time.')
@cronjobs.register(lock=False)
def my_task():
print('I am concurrent friendly!')
"""
def cron_decorator(f, lock=lock):
registered[f.__name__] = f
parameters[f.__name__] = params
if lock:
registered_lock[f.__name__] = f
return f
if callable(f):
return decorator(cron_decorator(f, lock),f)
return cron_decorator
|
Change decorators in cron to preserve signature
|
Change decorators in cron to preserve signature
|
Python
|
agpl-3.0
|
edx/edxanalytics,edx/edxanalytics,edx/edxanalytics,edx/insights,edx/insights,edx/edxanalytics
|
---
+++
@@ -1,6 +1,7 @@
registered = {}
registered_lock = {}
parameters = {}
+from decorator import decorator
def register(f=None, lock=True, params={}):
"""Decorator to add the function to the cronjob library.
@@ -15,7 +16,7 @@
"""
- def decorator(f, lock=lock):
+ def cron_decorator(f, lock=lock):
registered[f.__name__] = f
parameters[f.__name__] = params
if lock:
@@ -23,5 +24,5 @@
return f
if callable(f):
- return decorator(f, lock)
- return decorator
+ return decorator(cron_decorator(f, lock),f)
+ return cron_decorator
|
c3a15b4753ecfe7436b08456da90efb7be545a50
|
test/test_exceptions.py
|
test/test_exceptions.py
|
from neomodel import StructuredNode, StringProperty, DoesNotExist, CypherException
import pickle
class Person(StructuredNode):
name = StringProperty(unique_index=True)
def test_cypher_exception_can_be_displayed():
print CypherException("SOME QUERY", (), "ERROR", None, None)
def test_object_does_not_exist():
try:
Person.nodes.get(name="johnny")
except Person.DoesNotExist as e:
pickle_instance = pickle.dumps(e)
assert pickle_instance
assert pickle.loads(pickle_instance)
assert isinstance(pickle.loads(pickle_instance), DoesNotExist)
def test_raise_does_not_exist():
try:
raise DoesNotExist("My Test Message")
except DoesNotExist as e:
pickle_instance = pickle.dumps(e)
assert pickle_instance
assert pickle.loads(pickle_instance)
assert isinstance(pickle.loads(pickle_instance), DoesNotExist)
|
from neomodel import StructuredNode, StringProperty, DoesNotExist, CypherException
import pickle
class Person(StructuredNode):
name = StringProperty(unique_index=True)
def test_cypher_exception_can_be_displayed():
print(CypherException("SOME QUERY", (), "ERROR", None, None))
def test_object_does_not_exist():
try:
Person.nodes.get(name="johnny")
except Person.DoesNotExist as e:
pickle_instance = pickle.dumps(e)
assert pickle_instance
assert pickle.loads(pickle_instance)
assert isinstance(pickle.loads(pickle_instance), DoesNotExist)
def test_raise_does_not_exist():
try:
raise DoesNotExist("My Test Message")
except DoesNotExist as e:
pickle_instance = pickle.dumps(e)
assert pickle_instance
assert pickle.loads(pickle_instance)
assert isinstance(pickle.loads(pickle_instance), DoesNotExist)
|
Fix syntax Error for compability with python 3.X
|
Fix syntax Error for compability with python 3.X
|
Python
|
mit
|
robinedwards/neomodel,wcooley/neomodel,robinedwards/neomodel,pombredanne/neomodel,fpieper/neomodel
|
---
+++
@@ -7,7 +7,7 @@
def test_cypher_exception_can_be_displayed():
- print CypherException("SOME QUERY", (), "ERROR", None, None)
+ print(CypherException("SOME QUERY", (), "ERROR", None, None))
def test_object_does_not_exist():
try:
|
14a4b836853909763b8961dfcdc58477607180fd
|
protocols/views.py
|
protocols/views.py
|
from django.shortcuts import render
from django.conf.urls.defaults import *
from django.contrib.auth.decorators import user_passes_test
from .forms import ProtocolForm, TopicFormSet
def can_add_protocols(user):
return user.is_authenticated() and user.has_perm('protocols.add_protocol')
@user_passes_test(can_add_protocols)
def add(request):
data = request.POST if request.POST else None
protocol_form = ProtocolForm(data)
topic_form = TopicFormSet(data)
if protocol_form.is_valid() and topic_form.is_valid():
protocol_form.save()
topic_form.save()
return render(request, 'protocols/add.html', locals())
|
from django.shortcuts import render
from django.conf.urls import *
from django.contrib.auth.decorators import user_passes_test
from .forms import ProtocolForm, TopicFormSet
def can_add_protocols(user):
return user.is_authenticated() and user.has_perm('protocols.add_protocol')
@user_passes_test(can_add_protocols)
def add(request):
data = request.POST if request.POST else None
protocol_form = ProtocolForm(data)
topic_form = TopicFormSet(data)
if protocol_form.is_valid() and topic_form.is_valid():
protocol_form.save()
topic_form.save()
return render(request, 'protocols/add.html', locals())
|
Change django.conf.urls.defaults (it is depricated)
|
Change django.conf.urls.defaults (it is depricated)
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
---
+++
@@ -1,5 +1,5 @@
from django.shortcuts import render
-from django.conf.urls.defaults import *
+from django.conf.urls import *
from django.contrib.auth.decorators import user_passes_test
|
6fa5c20f4d3b6ea9716adbf4c5fd50739f2f987e
|
protractor/test.py
|
protractor/test.py
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
protractor_command += ' --params.live_server_url={}'.format(self.live_server_url)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().iteritems():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
Add hook for protactor params
|
Add hook for protactor params
|
Python
|
mit
|
jpulec/django-protractor,penguin359/django-protractor
|
---
+++
@@ -22,12 +22,21 @@
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
+ def get_protractor_params(self):
+ """A hook for adding params that protractor will receive."""
+ return {
+ 'live_server_url': self.live_server_url
+ }
+
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
- protractor_command += ' --params.live_server_url={}'.format(self.live_server_url)
+ for key, value in self.get_protractor_params().iteritems():
+ protractor_command += ' --params.{key}={value}'.format(
+ key=key, value=value
+ )
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
ac850c8f9284fbe6fd8e6318431d5e4856f26c7c
|
openquake/calculators/tests/classical_risk_test.py
|
openquake/calculators/tests/classical_risk_test.py
|
import unittest
from nose.plugins.attrib import attr
from openquake.qa_tests_data.classical_risk import (
case_1, case_2, case_3, case_4)
from openquake.calculators.tests import CalculatorTestCase
class ClassicalRiskTestCase(CalculatorTestCase):
@attr('qa', 'risk', 'classical_risk')
def test_case_1(self):
raise unittest.SkipTest
@attr('qa', 'risk', 'classical_risk')
def test_case_2(self):
raise unittest.SkipTest
@attr('qa', 'risk', 'classical_risk')
def test_case_3(self):
out = self.run_calc(case_3.__file__, 'job_haz.ini,job_risk.ini',
exports='csv')
[fname] = out['avg_losses-rlzs', 'csv']
self.assertEqualFiles('expected/rlz-000-avg_loss.csv', fname)
@attr('qa', 'risk', 'classical_risk')
def test_case_4(self):
out = self.run_calc(case_4.__file__, 'job_haz.ini,job_risk.ini',
exports='csv')
fnames = out['avg_losses-rlzs', 'csv']
self.assertEqualFiles('expected/rlz-000-avg_loss.csv', fnames[0])
self.assertEqualFiles('expected/rlz-001-avg_loss.csv', fnames[1])
|
import unittest
from nose.plugins.attrib import attr
from openquake.qa_tests_data.classical_risk import (
case_1, case_2, case_3, case_4)
from openquake.calculators.tests import CalculatorTestCase
class ClassicalRiskTestCase(CalculatorTestCase):
@attr('qa', 'risk', 'classical_risk')
def test_case_1(self):
out = self.run_calc(case_1.__file__, 'job_risk.ini', exports='xml')
@attr('qa', 'risk', 'classical_risk')
def test_case_2(self):
raise unittest.SkipTest
@attr('qa', 'risk', 'classical_risk')
def test_case_3(self):
out = self.run_calc(case_3.__file__, 'job_haz.ini,job_risk.ini',
exports='csv')
[fname] = out['avg_losses-rlzs', 'csv']
self.assertEqualFiles('expected/rlz-000-avg_loss.csv', fname)
@attr('qa', 'risk', 'classical_risk')
def test_case_4(self):
out = self.run_calc(case_4.__file__, 'job_haz.ini,job_risk.ini',
exports='csv')
fnames = out['avg_losses-rlzs', 'csv']
self.assertEqualFiles('expected/rlz-000-avg_loss.csv', fnames[0])
self.assertEqualFiles('expected/rlz-001-avg_loss.csv', fnames[1])
|
Work on classical_risk test_case_1 and test_case_2
|
Work on classical_risk test_case_1 and test_case_2
|
Python
|
agpl-3.0
|
gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine
|
---
+++
@@ -10,7 +10,7 @@
@attr('qa', 'risk', 'classical_risk')
def test_case_1(self):
- raise unittest.SkipTest
+ out = self.run_calc(case_1.__file__, 'job_risk.ini', exports='xml')
@attr('qa', 'risk', 'classical_risk')
def test_case_2(self):
|
9a5229fe7ae4a240d91bfae59b61c5e8dda1aa13
|
bucketeer/test/test_commit.py
|
bucketeer/test/test_commit.py
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
|
Add test for new file upload to existing bucket
|
Add test for new file upload to existing bucket
|
Python
|
mit
|
mgarbacz/bucketeer
|
---
+++
@@ -45,6 +45,9 @@
def testMain(self):
self.assertTrue(commit)
+ def testNewFileUploadExistingBucket(self):
+ result = commit.commit_to_s3(existing_bucket, test_dir)
+ self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
|
5da51e1820c03a76dfdb9926023848b7399691da
|
inthe_am/taskmanager/models/usermetadata.py
|
inthe_am/taskmanager/models/usermetadata.py
|
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
class UserMetadata(models.Model):
user = models.ForeignKey(
User, related_name="metadata", unique=True, on_delete=models.CASCADE
)
tos_version = models.IntegerField(default=0)
tos_accepted = models.DateTimeField(default=None, null=True,)
privacy_policy_version = models.IntegerField(default=0)
privacy_policy_accepted = models.DateTimeField(default=None, null=True,)
colorscheme = models.CharField(default="dark-yellow-green.theme", max_length=255,)
@property
def tos_up_to_date(self):
return self.tos_version == settings.TOS_VERSION
@property
def privacy_policy_up_to_date(self):
return self.privacy_policy_version == settings.PRIVACY_POLICY_VERSION
@classmethod
def get_for_user(cls, user):
meta, created = UserMetadata.objects.get_or_create(user=user)
return meta
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
from . import TaskStore
if self.tos_up_to_date and self.privacy_policy_up_to_date:
store = TaskStore.get_for_user(self.user)
store.taskd_account.resume()
def __str__(self):
return self.user.username
class Meta:
app_label = "taskmanager"
|
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
class UserMetadata(models.Model):
user = models.OneToOneField(
User, related_name="metadata", on_delete=models.CASCADE
)
tos_version = models.IntegerField(default=0)
tos_accepted = models.DateTimeField(default=None, null=True,)
privacy_policy_version = models.IntegerField(default=0)
privacy_policy_accepted = models.DateTimeField(default=None, null=True,)
colorscheme = models.CharField(default="dark-yellow-green.theme", max_length=255,)
@property
def tos_up_to_date(self):
return self.tos_version == settings.TOS_VERSION
@property
def privacy_policy_up_to_date(self):
return self.privacy_policy_version == settings.PRIVACY_POLICY_VERSION
@classmethod
def get_for_user(cls, user):
meta, created = UserMetadata.objects.get_or_create(user=user)
return meta
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
from . import TaskStore
if self.tos_up_to_date and self.privacy_policy_up_to_date:
store = TaskStore.get_for_user(self.user)
store.taskd_account.resume()
def __str__(self):
return self.user.username
class Meta:
app_label = "taskmanager"
|
Change mapping to avoid warning
|
Change mapping to avoid warning
|
Python
|
agpl-3.0
|
coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am
|
---
+++
@@ -4,8 +4,8 @@
class UserMetadata(models.Model):
- user = models.ForeignKey(
- User, related_name="metadata", unique=True, on_delete=models.CASCADE
+ user = models.OneToOneField(
+ User, related_name="metadata", on_delete=models.CASCADE
)
tos_version = models.IntegerField(default=0)
tos_accepted = models.DateTimeField(default=None, null=True,)
|
9e41b1b8d19b27cd6bd1bb81fb34c9a3adf30ad5
|
entrypoint.py
|
entrypoint.py
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
# @app.route('/')
# def ContainerService():
# return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
# app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
else:
print('Running locally')
# stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
# output = stream.read()
# output
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
print('{} {} {}', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
# @app.route('/')
# def ContainerService():
# return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
# app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
else:
print('Running locally')
# stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
# output = stream.read()
# output
|
Debug Google Cloud Run support
|
Debug Google Cloud Run support
|
Python
|
mit
|
diodesign/diosix
|
---
+++
@@ -21,6 +21,8 @@
from flask import Flask
if __name__ == "__main__":
+ print('{} {} {}', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
+
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
|
95fffa0fbe744b9087547a14a97fb7dd0e68ba76
|
chainer/functions/__init__.py
|
chainer/functions/__init__.py
|
# Non-parameterized functions
from accuracy import accuracy
from basic_math import exp, log
from concat import concat
from copy import copy
from dropout import dropout
from identity import identity
from leaky_relu import leaky_relu
from lstm import lstm
from mean_squared_error import mean_squared_error
from pooling_2d import average_pooling_2d, max_pooling_2d
from relu import relu
from sigmoid import sigmoid
from softmax import softmax
from softmax_cross_entropy import softmax_cross_entropy
from sum import sum
from tanh import tanh
# Parameterized layers
from batch_normalization import BatchNormalization
from convolution_2d import Convolution2D
from embed_id import EmbedID
from inception import Inception
from linear import Linear
from parameter import Parameter
from prelu import PReLU
|
"""Collection of :class:`~chainer.Function` implementations."""
# Parameterized function classes
from batch_normalization import BatchNormalization
from convolution_2d import Convolution2D
from embed_id import EmbedID
from inception import Inception
from linear import Linear
from parameter import Parameter
from prelu import PReLU
# Array manipulation functions
from concat import concat
from copy import copy
from dropout import dropout
from identity import identity
# Activation functions
from basic_math import exp, log
from leaky_relu import leaky_relu
from lstm import lstm
from relu import relu
from sigmoid import sigmoid
from softmax import softmax
from tanh import tanh
# Pooling functions
from pooling_2d import average_pooling_2d, max_pooling_2d
# Loss, evaluation and aggregation
from accuracy import accuracy
from mean_squared_error import mean_squared_error
from softmax_cross_entropy import softmax_cross_entropy
from sum import sum
|
Sort function imports to fit with documentation order
|
Sort function imports to fit with documentation order
|
Python
|
mit
|
kikusu/chainer,niboshi/chainer,ttakamura/chainer,wkentaro/chainer,okuta/chainer,cupy/cupy,ktnyt/chainer,nushio3/chainer,cupy/cupy,ytoyama/yans_chainer_hackathon,chainer/chainer,okuta/chainer,jnishi/chainer,keisuke-umezawa/chainer,kashif/chainer,cupy/cupy,cupy/cupy,muupan/chainer,chainer/chainer,masia02/chainer,jnishi/chainer,niboshi/chainer,t-abe/chainer,ktnyt/chainer,keisuke-umezawa/chainer,ttakamura/chainer,niboshi/chainer,tkerola/chainer,tereka114/chainer,chainer/chainer,jnishi/chainer,hvy/chainer,minhpqn/chainer,pfnet/chainer,hvy/chainer,AlpacaDB/chainer,aonotas/chainer,wkentaro/chainer,sou81821/chainer,Kaisuke5/chainer,wavelets/chainer,muupan/chainer,ysekky/chainer,benob/chainer,okuta/chainer,ikasumi/chainer,sinhrks/chainer,wkentaro/chainer,kuwa32/chainer,ktnyt/chainer,woodshop/complex-chainer,laysakura/chainer,jnishi/chainer,yanweifu/chainer,t-abe/chainer,tscohen/chainer,keisuke-umezawa/chainer,benob/chainer,keisuke-umezawa/chainer,anaruse/chainer,umitanuki/chainer,kikusu/chainer,wkentaro/chainer,hvy/chainer,tigerneil/chainer,nushio3/chainer,truongdq/chainer,chainer/chainer,niboshi/chainer,kiyukuta/chainer,woodshop/chainer,AlpacaDB/chainer,ronekko/chainer,rezoo/chainer,sinhrks/chainer,cemoody/chainer,elviswf/chainer,hvy/chainer,okuta/chainer,1986ks/chainer,ktnyt/chainer,hidenori-t/chainer,jfsantos/chainer,bayerj/chainer,truongdq/chainer,delta2323/chainer
|
---
+++
@@ -1,26 +1,34 @@
-# Non-parameterized functions
-from accuracy import accuracy
+"""Collection of :class:`~chainer.Function` implementations."""
+
+# Parameterized function classes
+from batch_normalization import BatchNormalization
+from convolution_2d import Convolution2D
+from embed_id import EmbedID
+from inception import Inception
+from linear import Linear
+from parameter import Parameter
+from prelu import PReLU
+
+# Array manipulation functions
+from concat import concat
+from copy import copy
+from dropout import dropout
+from identity import identity
+
+# Activation functions
from basic_math import exp, log
-from concat import concat
-from copy import copy
-from dropout import dropout
-from identity import identity
from leaky_relu import leaky_relu
from lstm import lstm
-from mean_squared_error import mean_squared_error
-from pooling_2d import average_pooling_2d, max_pooling_2d
from relu import relu
from sigmoid import sigmoid
from softmax import softmax
-from softmax_cross_entropy import softmax_cross_entropy
-from sum import sum
from tanh import tanh
-# Parameterized layers
-from batch_normalization import BatchNormalization
-from convolution_2d import Convolution2D
-from embed_id import EmbedID
-from inception import Inception
-from linear import Linear
-from parameter import Parameter
-from prelu import PReLU
+# Pooling functions
+from pooling_2d import average_pooling_2d, max_pooling_2d
+
+# Loss, evaluation and aggregation
+from accuracy import accuracy
+from mean_squared_error import mean_squared_error
+from softmax_cross_entropy import softmax_cross_entropy
+from sum import sum
|
896f402c79dd3bbe7d5cbc6e59787398a96b3747
|
runtests.py
|
runtests.py
|
import argparse
import os
import sys
# Force this to happen before loading django
try:
os.environ["DJANGO_SETTINGS_MODULE"] = "testtinymce.settings"
test_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, test_dir)
except ImportError:
pass
else:
import django
from django.conf import settings
from django.test.utils import get_runner
def runtests(verbosity=1, failfast=False):
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(interactive=True, verbosity=verbosity, failfast=failfast)
failures = test_runner.run_tests(["tinymce"])
sys.exit(bool(failures))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run the django-tinymce test suite.")
parser.add_argument(
"-v",
"--verbosity",
default=1,
type=int,
choices=[0, 1, 2, 3],
help="Verbosity level; 0=minimal output, 1=normal output, 2=all output",
)
parser.add_argument(
"--failfast",
action="store_true",
help="Stop running the test suite after first failed test.",
)
options = parser.parse_args()
runtests(verbosity=options.verbosity, failfast=options.failfast)
|
import argparse
import os
import sys
# Force this to happen before loading django
try:
os.environ["DJANGO_SETTINGS_MODULE"] = "testtinymce.settings"
test_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, test_dir)
except ImportError:
pass
else:
import django
from django.conf import settings
from django.test.utils import get_runner
def runtests(modules=["tinymce"], verbosity=1, failfast=False):
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(interactive=True, verbosity=verbosity, failfast=failfast)
failures = test_runner.run_tests(modules)
sys.exit(bool(failures))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run the django-tinymce test suite.")
parser.add_argument(
"modules",
nargs="*",
metavar="module",
help='Optional path(s) to test modules; e.g. "tinymce" or '
'"tinymce.tests.test_widgets".',
)
parser.add_argument(
"-v",
"--verbosity",
default=1,
type=int,
choices=[0, 1, 2, 3],
help="Verbosity level; 0=minimal output, 1=normal output, 2=all output",
)
parser.add_argument(
"--failfast",
action="store_true",
help="Stop running the test suite after first failed test.",
)
options = parser.parse_args()
runtests(modules=options.modules, verbosity=options.verbosity, failfast=options.failfast)
|
Add ability to run subset of tests
|
Add ability to run subset of tests
|
Python
|
mit
|
aljosa/django-tinymce,aljosa/django-tinymce,aljosa/django-tinymce,aljosa/django-tinymce
|
---
+++
@@ -15,16 +15,23 @@
from django.test.utils import get_runner
-def runtests(verbosity=1, failfast=False):
+def runtests(modules=["tinymce"], verbosity=1, failfast=False):
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(interactive=True, verbosity=verbosity, failfast=failfast)
- failures = test_runner.run_tests(["tinymce"])
+ failures = test_runner.run_tests(modules)
sys.exit(bool(failures))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run the django-tinymce test suite.")
+ parser.add_argument(
+ "modules",
+ nargs="*",
+ metavar="module",
+ help='Optional path(s) to test modules; e.g. "tinymce" or '
+ '"tinymce.tests.test_widgets".',
+ )
parser.add_argument(
"-v",
"--verbosity",
@@ -39,4 +46,4 @@
help="Stop running the test suite after first failed test.",
)
options = parser.parse_args()
- runtests(verbosity=options.verbosity, failfast=options.failfast)
+ runtests(modules=options.modules, verbosity=options.verbosity, failfast=options.failfast)
|
5e2697b55f1720c4c144840e680004fb28a3cfcc
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'td_biblio',
),
ROOT_URLCONF='td_biblio.urls',
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
)
from django.test.utils import get_runner
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(['td_biblio', ])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'td_biblio',
),
ROOT_URLCONF='td_biblio.urls',
SITE_ID=1,
SECRET_KEY='this-is-just-for-tests-so-not-that-secret',
)
from django.test.utils import get_runner
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests([
'td_biblio.tests.test_commands',
'td_biblio.tests.test_factories',
'td_biblio.tests.test_models',
'td_biblio.tests.test_views',
])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Add more flexibity to run tests independantly
|
Add more flexibity to run tests independantly
|
Python
|
mit
|
TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio
|
---
+++
@@ -29,7 +29,12 @@
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
- failures = test_runner.run_tests(['td_biblio', ])
+ failures = test_runner.run_tests([
+ 'td_biblio.tests.test_commands',
+ 'td_biblio.tests.test_factories',
+ 'td_biblio.tests.test_models',
+ 'td_biblio.tests.test_views',
+ ])
sys.exit(failures)
|
946220075802cc59f3b34d3557c0b749c526c4b1
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
this_dir = os.path.abspath(os.path.dirname(__file__))
if this_dir not in sys.path:
sys.path.insert(0, this_dir)
from django.test.utils import get_runner
from django.conf import settings
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(["base", "threads", "events", "managers"])
sys.exit(bool(failures))
if __name__ == "__main__":
runtests()
|
#!/usr/bin/env python
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
this_dir = os.path.abspath(os.path.dirname(__file__))
if this_dir not in sys.path:
sys.path.insert(0, this_dir)
from django.test.utils import get_runner
from django.conf import settings
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests([
"base",
"threads",
"events",
"managers",
"workshift",
])
sys.exit(bool(failures))
if __name__ == "__main__":
runtests()
|
Add workshift to the list of tests
|
Add workshift to the list of tests
|
Python
|
bsd-2-clause
|
knagra/farnsworth,knagra/farnsworth,knagra/farnsworth,knagra/farnsworth
|
---
+++
@@ -14,7 +14,13 @@
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
- failures = test_runner.run_tests(["base", "threads", "events", "managers"])
+ failures = test_runner.run_tests([
+ "base",
+ "threads",
+ "events",
+ "managers",
+ "workshift",
+ ])
sys.exit(bool(failures))
if __name__ == "__main__":
|
45e758b56370f5bb34ff28c4660837fd9037b945
|
dom/automation/detect_malloc_errors.py
|
dom/automation/detect_malloc_errors.py
|
#!/usr/bin/env python
# Look for "szone_error" (Tiger), "malloc_error_break" (Leopard), "MallocHelp" (?)
# which are signs of malloc being unhappy (double free, out-of-memory, etc).
def amiss(logPrefix):
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
pline = ""
ppline = ""
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (-1 != line.find("szone_error")
or -1 != line.find("malloc_error_break")
or -1 != line.find("MallocHelp")):
if (-1 != pline.find("can't allocate region")):
print ""
print ppline
print pline
print line
foundSomething = True
break # Don't flood the log with repeated malloc failures
ppline = pline
pline = line
currentFile.close()
return foundSomething
|
#!/usr/bin/env python
# Look for "szone_error" (Tiger), "malloc_error_break" (Leopard), "MallocHelp" (?)
# which are signs of malloc being unhappy (double free, out-of-memory, etc).
def amiss(logPrefix):
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
pline = ""
ppline = ""
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (-1 != line.find("szone_error")
or -1 != line.find("malloc_error_break")
or -1 != line.find("MallocHelp")):
if (-1 == pline.find("can't allocate region")):
print ""
print ppline
print pline
print line
foundSomething = True
break # Don't flood the log with repeated malloc failures
ppline = pline
pline = line
currentFile.close()
return foundSomething
|
Fix reversed condition for ignoring "can't allocate region" errors
|
Fix reversed condition for ignoring "can't allocate region" errors
|
Python
|
mpl-2.0
|
MozillaSecurity/funfuzz,nth10sd/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz
|
---
+++
@@ -17,7 +17,7 @@
if (-1 != line.find("szone_error")
or -1 != line.find("malloc_error_break")
or -1 != line.find("MallocHelp")):
- if (-1 != pline.find("can't allocate region")):
+ if (-1 == pline.find("can't allocate region")):
print ""
print ppline
print pline
|
94788bd7a7ba0a7799689c4613a2afbcc377649b
|
games/migrations/0016_auto_20161209_1256.py
|
games/migrations/0016_auto_20161209_1256.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-09 11:56
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def create_revisions(apps, schema_editor):
call_command('createinitialrevisions')
class Migration(migrations.Migration):
dependencies = [
('games', '0015_installer_draft'),
]
operations = [
migrations.RunPython(create_revisions)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-09 11:56
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def create_revisions(apps, schema_editor):
call_command('createinitialrevisions')
class Migration(migrations.Migration):
dependencies = [
('games', '0015_installer_draft'),
('reversion', '0001_squashed_0004_auto_20160611_1202'),
]
operations = [
migrations.RunPython(create_revisions)
]
|
Add dependency to reversion data migration
|
Add dependency to reversion data migration
|
Python
|
agpl-3.0
|
Turupawn/website,Turupawn/website,lutris/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website
|
---
+++
@@ -14,6 +14,7 @@
dependencies = [
('games', '0015_installer_draft'),
+ ('reversion', '0001_squashed_0004_auto_20160611_1202'),
]
operations = [
|
4c3a2a61c6a8cb5e0ece14bced4ec8b33df45400
|
tests/simple/_util.py
|
tests/simple/_util.py
|
#######################################################
# Copyright (c) 2015, ArrayFire
# All rights reserved.
#
# This file is distributed under 3-clause BSD license.
# The complete license agreement can be obtained at:
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################
import arrayfire as af
def display_func(verbose):
if (verbose):
return af.display
else:
def eval_func(foo):
res = foo
return eval_func
def print_func(verbose):
def print_func_impl(*args):
if (verbose):
print(args)
else:
res = [args]
return print_func_impl
class _simple_test_dict(dict):
def __init__(self):
self.print_str = "Simple %16s: %s"
super(_simple_test_dict, self).__init__()
def run(self, name_list=None, verbose=False):
test_list = name_list if name_list is not None else self.keys()
for key in test_list:
try:
test = self[key]
except:
print(self.print_str % (key, "NOTFOUND"))
continue
try:
test(verbose)
print(self.print_str % (key, "PASSED"))
except:
print(self.print_str % (key, "FAILED"))
tests = _simple_test_dict()
|
#######################################################
# Copyright (c) 2015, ArrayFire
# All rights reserved.
#
# This file is distributed under 3-clause BSD license.
# The complete license agreement can be obtained at:
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################
import traceback
import logging
import arrayfire as af
def display_func(verbose):
if (verbose):
return af.display
else:
def eval_func(foo):
res = foo
return eval_func
def print_func(verbose):
def print_func_impl(*args):
if (verbose):
print(args)
else:
res = [args]
return print_func_impl
class _simple_test_dict(dict):
def __init__(self):
self.print_str = "Simple %16s: %s"
super(_simple_test_dict, self).__init__()
def run(self, name_list=None, verbose=False):
test_list = name_list if name_list is not None else self.keys()
for key in test_list:
try:
test = self[key]
except:
print(self.print_str % (key, "NOTFOUND"))
continue
try:
test(verbose)
print(self.print_str % (key, "PASSED"))
except Exception as e:
print(self.print_str % (key, "FAILED"))
if (verbose):
logging.error(traceback.format_exc())
tests = _simple_test_dict()
|
Add proper logging to tests when in verbose mode
|
Add proper logging to tests when in verbose mode
|
Python
|
bsd-3-clause
|
arrayfire/arrayfire_python,pavanky/arrayfire-python,arrayfire/arrayfire-python
|
---
+++
@@ -7,6 +7,8 @@
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################
+import traceback
+import logging
import arrayfire as af
def display_func(verbose):
@@ -34,7 +36,6 @@
def run(self, name_list=None, verbose=False):
test_list = name_list if name_list is not None else self.keys()
for key in test_list:
-
try:
test = self[key]
except:
@@ -44,7 +45,10 @@
try:
test(verbose)
print(self.print_str % (key, "PASSED"))
- except:
+ except Exception as e:
print(self.print_str % (key, "FAILED"))
+ if (verbose):
+ logging.error(traceback.format_exc())
+
tests = _simple_test_dict()
|
361af42be2c3044a15480572befb1405a603b4ab
|
VALDprepare.py
|
VALDprepare.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# My imports
import argparse
import gzip
def _parser():
parser = argparse.ArgumentParser(description='Prepare the data downloaded '
'from VALD.')
parser.add_argument('input', help='input compressed file')
parser.add_argument('-o', '--output',
help='Optional output',
default=False)
return parser.parse_args()
def main(input, output=False):
if not isinstance(input, str):
raise TypeError('Input must be a str. A %s was parsed' % type(input))
if not isinstance(output, str) and output:
raise TypeError('Output must be a str. A %s was parsed' % type(output))
# TODO: Check if the input exists
fname = input.rpartition('.')[0]
if not output:
output = '%s.dat' % fname
oref = '%s.ref' % fname
fout = ''
fref = ''
with gzip.open(input, 'r') as lines:
for i, line in enumerate(lines):
if i < 2:
fout += '# %s' % line.replace("'", '')
else:
fout += line.replace("'", '')
if 'References' in line:
break
with open(output, 'w') as fo:
fo.write(fout)
if __name__ == '__main__':
args = _parser()
input, output = args.input, args.output
main(input, output)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# My imports
import argparse
import gzip
import os
def _parser():
parser = argparse.ArgumentParser(description='Prepare the data downloaded '
'from VALD.')
parser.add_argument('input', help='input compressed file', type=str)
parser.add_argument('-o', '--output',
help='Optional output',
default=False, type=str)
return parser.parse_args()
def main(input, output=False):
if not os.path.isfile(input):
raise IOError('File: %s does not exists' % input)
fname = input.rpartition('.')[0]
if not output:
output = '%s.dat' % fname
oref = '%s.ref' % fname
fout = ''
fref = ''
with gzip.open(input, 'r') as lines:
for i, line in enumerate(lines):
if i < 2:
fout += '# %s' % line.replace("'", '')
else:
fout += line.replace("'", '')
if 'References' in line:
break
with open(output, 'w') as fo:
fo.write(fout)
if __name__ == '__main__':
args = _parser()
input, output = args.input, args.output
main(input, output)
|
Check if the file exists before doing anything else.
|
Check if the file exists before doing anything else.
|
Python
|
mit
|
DanielAndreasen/astro_scripts
|
---
+++
@@ -4,25 +4,23 @@
# My imports
import argparse
import gzip
+import os
def _parser():
parser = argparse.ArgumentParser(description='Prepare the data downloaded '
'from VALD.')
- parser.add_argument('input', help='input compressed file')
+ parser.add_argument('input', help='input compressed file', type=str)
parser.add_argument('-o', '--output',
help='Optional output',
- default=False)
+ default=False, type=str)
return parser.parse_args()
def main(input, output=False):
- if not isinstance(input, str):
- raise TypeError('Input must be a str. A %s was parsed' % type(input))
- if not isinstance(output, str) and output:
- raise TypeError('Output must be a str. A %s was parsed' % type(output))
- # TODO: Check if the input exists
+ if not os.path.isfile(input):
+ raise IOError('File: %s does not exists' % input)
fname = input.rpartition('.')[0]
if not output:
|
62f681803401d05fd0a5e554d4d6c7210dcc7c17
|
cbv/management/commands/load_all_django_versions.py
|
cbv/management/commands/load_all_django_versions.py
|
import os
import re
from django.conf import settings
from django.core.management import call_command, BaseCommand
class Command(BaseCommand):
"""Load the Django project fixtures and all version fixtures"""
def handle(self, **options):
fixtures_dir = os.path.join(settings.DIRNAME, 'cbv', 'fixtures')
self.stdout.write('Loading project.json')
call_command('loaddata', 'cbv/fixtures/project.json')
version_fixtures = [re.match(r'((?:\d+\.){2,3}json)', filename) for filename in os.listdir(fixtures_dir)]
for match in version_fixtures:
try:
fixture = match.group()
except AttributeError:
continue
self.stdout.write('Loading {}'.format(fixture))
call_command('loaddata', 'cbv/fixtures/{}'.format(fixture))
|
import glob
import os
from django.core.management import call_command, BaseCommand
class Command(BaseCommand):
"""Load the Django project fixtures and all version fixtures"""
def handle(self, **options):
self.stdout.write('Loading project.json')
call_command('loaddata', 'cbv/fixtures/project.json')
version_fixtures = glob.glob(os.path.join('cbv', 'fixtures', '*.*.*json'))
for fixture in version_fixtures:
self.stdout.write('Loading {}'.format(fixture))
call_command('loaddata', fixture)
|
Use glob for finding version fixtures
|
Use glob for finding version fixtures
Thanks @ghickman!
|
Python
|
bsd-2-clause
|
refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector
|
---
+++
@@ -1,7 +1,6 @@
+import glob
import os
-import re
-from django.conf import settings
from django.core.management import call_command, BaseCommand
@@ -9,14 +8,9 @@
"""Load the Django project fixtures and all version fixtures"""
def handle(self, **options):
- fixtures_dir = os.path.join(settings.DIRNAME, 'cbv', 'fixtures')
self.stdout.write('Loading project.json')
call_command('loaddata', 'cbv/fixtures/project.json')
- version_fixtures = [re.match(r'((?:\d+\.){2,3}json)', filename) for filename in os.listdir(fixtures_dir)]
- for match in version_fixtures:
- try:
- fixture = match.group()
- except AttributeError:
- continue
+ version_fixtures = glob.glob(os.path.join('cbv', 'fixtures', '*.*.*json'))
+ for fixture in version_fixtures:
self.stdout.write('Loading {}'.format(fixture))
- call_command('loaddata', 'cbv/fixtures/{}'.format(fixture))
+ call_command('loaddata', fixture)
|
b6416ba4c32aaeddb567be4486854d6415c3048e
|
tornwamp/customize.py
|
tornwamp/customize.py
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
"""
TornWAMP user-configurable structures.
"""
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code
processors = {
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
Code.CALL: rpc.CallProcessor,
Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
|
Add PublishProcessor to processors' list
|
Add PublishProcessor to processors' list
|
Python
|
apache-2.0
|
ef-ctx/tornwamp
|
---
+++
@@ -8,7 +8,8 @@
Code.HELLO: HelloProcessor,
Code.GOODBYE: GoodbyeProcessor,
Code.SUBSCRIBE: pubsub.SubscribeProcessor,
- Code.CALL: rpc.CallProcessor
+ Code.CALL: rpc.CallProcessor,
+ Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
|
369964986df0ca558c2e340bc8d15272296af67e
|
tools/debug_launcher.py
|
tools/debug_launcher.py
|
from __future__ import print_function
import sys
import os
import time
import socket
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument('--launch-adapter')
parser.add_argument('--lldb')
parser.add_argument('--wait-port')
args = parser.parse_args()
if args.launch_adapter:
lldb = args.lldb or 'lldb'
cmd = [lldb, '-b',
'-O', 'command script import %s' % args.launch_adapter,
'-O', 'script import ptvsd; ptvsd.enable_attach(address=("0.0.0.0", 3000)); ptvsd.wait_for_attach(); adapter.run_tcp_session(4711)',
]
print('Launching', cmd)
subprocess.Popen(cmd, preexec_fn=lambda: os.setsid())
if args.wait_port:
port = int(args.wait_port)
print('Waiting for port %d' % port)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
result = sock.connect_ex(('127.0.0.1', port))
if result == 0:
break
time.sleep(0.5)
print('Port opened')
sock.shutdown(socket.SHUT_WR)
sock.close()
|
from __future__ import print_function
import sys
import os
import time
import socket
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument('--launch-adapter')
parser.add_argument('--lldb')
parser.add_argument('--wait-port')
args = parser.parse_args()
if args.launch_adapter:
lldb = args.lldb or 'lldb'
cmd = [lldb, '-b',
'-O', 'command script import %s' % args.launch_adapter,
'-O', 'script sys.argv=["lldb"]; import ptvsd; ptvsd.enable_attach(address=("0.0.0.0", 3000)); ptvsd.wait_for_attach()',
'-O', 'script adapter.run_tcp_session(4711)',
]
print('Launching', cmd)
if sys.platform != 'win32':
subprocess.Popen(cmd, preexec_fn=lambda: os.setsid())
else:
subprocess.Popen(cmd, creationflags=subprocess.CREATE_NEW_CONSOLE)
if args.wait_port:
port = int(args.wait_port)
print('Waiting for port %d' % port)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
result = sock.connect_ex(('127.0.0.1', port))
if result == 0:
break
time.sleep(0.5)
print('Port opened')
sock.shutdown(socket.SHUT_WR)
sock.close()
|
Fix python debugging on Windows.
|
Fix python debugging on Windows.
|
Python
|
mit
|
vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb
|
---
+++
@@ -17,10 +17,14 @@
lldb = args.lldb or 'lldb'
cmd = [lldb, '-b',
'-O', 'command script import %s' % args.launch_adapter,
- '-O', 'script import ptvsd; ptvsd.enable_attach(address=("0.0.0.0", 3000)); ptvsd.wait_for_attach(); adapter.run_tcp_session(4711)',
+ '-O', 'script sys.argv=["lldb"]; import ptvsd; ptvsd.enable_attach(address=("0.0.0.0", 3000)); ptvsd.wait_for_attach()',
+ '-O', 'script adapter.run_tcp_session(4711)',
]
print('Launching', cmd)
- subprocess.Popen(cmd, preexec_fn=lambda: os.setsid())
+ if sys.platform != 'win32':
+ subprocess.Popen(cmd, preexec_fn=lambda: os.setsid())
+ else:
+ subprocess.Popen(cmd, creationflags=subprocess.CREATE_NEW_CONSOLE)
if args.wait_port:
port = int(args.wait_port)
|
d4aa2b1a0a72696ce34f5aa2f5e588fc3a72e622
|
cfgrib/__main__.py
|
cfgrib/__main__.py
|
import argparse
import sys
from . import eccodes
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--selfcheck', default=False, action='store_true')
args = parser.parse_args()
if args.selfcheck:
eccodes.codes_get_api_version()
print("Your system is ready.")
else:
raise RuntimeError("Command not recognised. See usage with --help.")
if __name__ == '__main__':
main()
|
#
# Copyright 2017-2018 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# Alessandro Amici - B-Open - https://bopen.eu
#
import argparse
from . import eccodes
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--selfcheck', default=False, action='store_true')
args = parser.parse_args()
if args.selfcheck:
eccodes.codes_get_api_version()
print("Your system is ready.")
else:
raise RuntimeError("Command not recognised. See usage with --help.")
if __name__ == '__main__':
main()
|
Add copyright noticeand Authors comment.
|
Add copyright noticeand Authors comment.
|
Python
|
apache-2.0
|
ecmwf/cfgrib
|
---
+++
@@ -1,6 +1,23 @@
+#
+# Copyright 2017-2018 European Centre for Medium-Range Weather Forecasts (ECMWF).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Authors:
+# Alessandro Amici - B-Open - https://bopen.eu
+#
import argparse
-import sys
from . import eccodes
|
9fec06c6acf57b4d49b9c49b7e1d3b5c90e2c9c4
|
blog/admin.py
|
blog/admin.py
|
from django.contrib import admin
from .models import Post
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
# list view
date_hierarchy = 'pub_date'
list_display = ('title', 'pub_date')
list_filter = ('pub_date',)
search_fields = ('title', 'text')
# form view
fieldsets = (
(None, {
'fields': (
'title', 'slug', 'author', 'text',
)}),
('Related', {
'fields': (
'tags', 'startups')}),
)
filter_horizontal = ('startups',)
filter_vertical = ('tags',)
prepopulated_fields = {"slug": ("title",)}
|
from django.contrib import admin
from .models import Post
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
# list view
date_hierarchy = 'pub_date'
list_display = ('title', 'pub_date')
list_filter = ('pub_date',)
search_fields = ('title', 'text')
# form view
fieldsets = (
(None, {
'fields': (
'title', 'slug', 'author', 'text',
)}),
('Related', {
'fields': (
'tags', 'startups')}),
)
filter_horizontal = ('tags', 'startups',)
prepopulated_fields = {"slug": ("title",)}
|
Use horizontal filter for M2M in PostAdmin.
|
Ch23: Use horizontal filter for M2M in PostAdmin.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
---
+++
@@ -20,6 +20,5 @@
'fields': (
'tags', 'startups')}),
)
- filter_horizontal = ('startups',)
- filter_vertical = ('tags',)
+ filter_horizontal = ('tags', 'startups',)
prepopulated_fields = {"slug": ("title",)}
|
b0273cc12abaf9a3f9f2e6c534d82bd7581c240e
|
ctypeslib/test/test_dynmodule.py
|
ctypeslib/test/test_dynmodule.py
|
# Basic test of dynamic code generation
import unittest
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
if __name__ == "__main__":
unittest.main()
|
# Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def tearDown(self):
for fnm in glob.glob(stdio._gen_basename + ".*"):
try:
os.remove(fnm)
except IOError:
pass
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
if __name__ == "__main__":
unittest.main()
|
Clean up generated files in the tearDown method.
|
Clean up generated files in the tearDown method.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@52711 6015fed2-1504-0410-9fe1-9d1591cc4771
|
Python
|
mit
|
trolldbois/ctypeslib,trolldbois/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib,luzfcb/ctypeslib,luzfcb/ctypeslib
|
---
+++
@@ -1,10 +1,18 @@
# Basic test of dynamic code generation
import unittest
+import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
+ def tearDown(self):
+ for fnm in glob.glob(stdio._gen_basename + ".*"):
+ try:
+ os.remove(fnm)
+ except IOError:
+ pass
+
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
|
0945e04edcb4739069f4263bbd022bff4320606e
|
examples/LKE_example.py
|
examples/LKE_example.py
|
# for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from LKE import *
sys.path.insert(0, '../pygraphc/clustering')
from ClusterUtility import *
from ClusterEvaluation import *
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'lke-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
myparser = LKE(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.get_logs()
ClusterUtility.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ClusterEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of LKE is', time)
|
# for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from LKE import *
sys.path.insert(0, '../pygraphc/evaluation')
from ExternalEvaluation import *
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'lke-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
myparser = LKE(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of LKE is', time)
|
Change module path for cluster evaluation and edit how to get original logs
|
Change module path for cluster evaluation and edit how to get original logs
|
Python
|
mit
|
studiawan/pygraphc
|
---
+++
@@ -2,9 +2,8 @@
import sys
sys.path.insert(0, '../pygraphc/misc')
from LKE import *
-sys.path.insert(0, '../pygraphc/clustering')
-from ClusterUtility import *
-from ClusterEvaluation import *
+sys.path.insert(0, '../pygraphc/evaluation')
+from ExternalEvaluation import *
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
@@ -17,10 +16,10 @@
myparser = LKE(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
-original_logs = myparser.get_logs()
+original_logs = myparser.logs
-ClusterUtility.set_cluster_label_id(None, clusters, original_logs, prediction_file)
-homogeneity_completeness_vmeasure = ClusterEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
+ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
+homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
|
7c49517c3c24d239c2bd44d82916b4f3d90ca1e2
|
utilities/__init__.py
|
utilities/__init__.py
|
#! /usr/bin/env python
from subprocess import Popen, PIPE
def launch(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
def get_stdout(cmd):
"""
Fork the specified command, returning stdout
"""
return launch(cmd)[0]
def get_stderr(cmd):
"""
Fork the specified command, returning stderr
"""
return launch(cmd)[1]
|
#! /usr/bin/env python
from subprocess import Popen, PIPE
def popen(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
def get_stdout(cmd):
"""
Fork the specified command, returning stdout
"""
return popen(cmd)[0]
def get_stderr(cmd):
"""
Fork the specified command, returning stderr
"""
return popen(cmd)[1]
|
Switch to using popen as the function name to stick more to subprocess naming
|
Switch to using popen as the function name to stick more to subprocess naming
|
Python
|
mit
|
IanLee1521/utilities
|
---
+++
@@ -3,7 +3,7 @@
from subprocess import Popen, PIPE
-def launch(cmd):
+def popen(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
@@ -14,11 +14,11 @@
"""
Fork the specified command, returning stdout
"""
- return launch(cmd)[0]
+ return popen(cmd)[0]
def get_stderr(cmd):
"""
Fork the specified command, returning stderr
"""
- return launch(cmd)[1]
+ return popen(cmd)[1]
|
d147d8865dc4b82eaff87d0d4dd65ba7f4622a90
|
django/contrib/admin/__init__.py
|
django/contrib/admin/__init__.py
|
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
# ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
git-svn-id: http://code.djangoproject.com/svn/django/trunk@14359 bcc190cf-cafb-0310-a4f2-bffc1f526a37
--HG--
extra : convert_revision : e026073455a73c9fe9a9f026b76ac783b2a12d23
|
Python
|
bsd-3-clause
|
adieu/django-nonrel,heracek/django-nonrel,adieu/django-nonrel,heracek/django-nonrel,adieu/django-nonrel,heracek/django-nonrel
|
---
+++
@@ -1,3 +1,6 @@
+# ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
+# has been referenced in documentation.
+from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
|
4f05805c0ec31da0b978cdccc0d79336272859fe
|
node/multi_var.py
|
node/multi_var.py
|
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
Fix multivar for nodes with variable length stacks
|
Fix multivar for nodes with variable length stacks
|
Python
|
mit
|
muddyfish/PYKE,muddyfish/PYKE
|
---
+++
@@ -10,16 +10,14 @@
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
- self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
- if len(stack) == 0:
- self.add_arg(stack)
+ self.node_1.prepare(stack)
+ self.node_2.prepare(stack)
+ self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
- self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
- self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
3518e9088ecbbc273f922ba418d2962d6af2dda5
|
feature_extraction/measurements/texture_haralick.py
|
feature_extraction/measurements/texture_haralick.py
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
class HaralickTexture(Measurement):
def compute(self, image):
return []
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
image = image[mask]
# -- haralick setup and run
return []
|
Add cell-boundary preprocessing to HaralickTexture measurement
|
Add cell-boundary preprocessing to HaralickTexture measurement
|
Python
|
apache-2.0
|
widoptimization-willett/feature-extraction
|
---
+++
@@ -1,6 +1,29 @@
from . import Measurement
import feature_extraction.util.cleanup as cleanup
+from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
+ default_options = {
+ 'clip_cell_borders': True,
+ 'erode_cell': False,
+ 'erode_cell_amount': False,
+ }
+ def __init__(self, options=None):
+ super(HaralickTexture, self).__init__(options)
+
def compute(self, image):
+ # -- preprocessing
+ if self.options.clip_cell_borders:
+ # get the cell boundary mask
+ mask = cleanup.cell_boundary_mask(image)
+
+ # if we're told to, erode the mask with a disk by some amount
+ if self.options.erode_cell:
+ mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
+
+ # mask the image
+ image = image[mask]
+
+ # -- haralick setup and run
+
return []
|
b08315337e71737a36e3e79da99ce167620711b9
|
photodaemon.py
|
photodaemon.py
|
#!/bin/env python
import picamera
import redis
import time
import json
import os
def take_photo():
print "%s Capture photo" % (time.strftime('%Y.%m.%d %H:%M:%S %Z'))
camera = picamera.PiCamera()
camera.vflip = True
camera.resolution = (1280, 720)
time.sleep(1)
camera.capture('static/photo.jpg')
camera.close()
r.publish('photo', time.time())
print "%s Capture done" % (time.strftime('%Y.%m.%d %H:%M:%S %Z'))
def get_config():
with open(os.path.join(os.path.dirname(__file__), 'config.json')) as data_file:
return json.load(data_file)
def main():
config = get_config()
r = redis.StrictRedis(host=config['host'], port=config['port'], db=config['db'])
p = r.pubsub()
p.subscribe('take-photo')
while True:
message = p.get_message()
if message and message['type'] == 'message':
take_photo()
time.sleep(0.1)
if __name__ == '__main__':
main()
|
#!/bin/env python
import picamera
import redis
import time
import json
import os
def take_photo():
print "%s Capture photo" % (time.strftime('%Y.%m.%d %H:%M:%S %Z'))
camera = picamera.PiCamera()
camera.vflip = True
camera.resolution = (1280, 720)
time.sleep(1)
camera.capture('static/photo.jpg')
camera.close()
print "%s Capture done" % (time.strftime('%Y.%m.%d %H:%M:%S %Z'))
def get_config():
with open(os.path.join(os.path.dirname(__file__), 'config.json')) as data_file:
return json.load(data_file)
def main():
config = get_config()
r = redis.StrictRedis(host=config['host'], port=config['port'], db=config['db'])
p = r.pubsub()
p.subscribe('take-photo')
while True:
message = p.get_message()
if message and message['type'] == 'message':
take_photo()
r.publish('photo', time.time())
time.sleep(0.1)
if __name__ == '__main__':
main()
|
Fix publishing photo creation event
|
Fix publishing photo creation event
|
Python
|
mit
|
Ajnasz/pippo,Ajnasz/pippo,Ajnasz/pippo
|
---
+++
@@ -14,7 +14,6 @@
time.sleep(1)
camera.capture('static/photo.jpg')
camera.close()
- r.publish('photo', time.time())
print "%s Capture done" % (time.strftime('%Y.%m.%d %H:%M:%S %Z'))
def get_config():
@@ -32,6 +31,7 @@
message = p.get_message()
if message and message['type'] == 'message':
take_photo()
+ r.publish('photo', time.time())
time.sleep(0.1)
|
b2fbb48049abbfff7f1636059f8ad7eda07667c7
|
test/single_system/all.py
|
test/single_system/all.py
|
import sys, unittest
import bmc_test
import power_test
import xmlrunner
tests = []
tests.extend(bmc_test.tests)
#tests.extend(power_test.tests)
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
|
import sys, unittest, os
import bmc_test
import power_test
import xmlrunner
tests = []
tests.extend(bmc_test.tests)
#tests.extend(power_test.tests)
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
result = xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
if result.failures or result.errors:
os.sys.exit(1)
|
Return a bad error code when a test fails
|
Return a bad error code when a test fails
|
Python
|
bsd-3-clause
|
Cynerva/pyipmi,emaadmanzoor/pyipmi
|
---
+++
@@ -1,4 +1,4 @@
-import sys, unittest
+import sys, unittest, os
import bmc_test
import power_test
import xmlrunner
@@ -11,4 +11,6 @@
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
- xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
+ result = xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
+ if result.failures or result.errors:
+ os.sys.exit(1)
|
6046de052e1f19d2b7cdd3d86f921ac3c16ce338
|
usaidmmc/__init__.py
|
usaidmmc/__init__.py
|
from __future__ import absolute_import
from usaidmmc.celery import app as celery_app
|
from __future__ import absolute_import
from usaidmmc.celery import app as celery_app # flake8: noqa
|
Stop flake8 complaining about task importer
|
Stop flake8 complaining about task importer
|
Python
|
bsd-3-clause
|
praekelt/django-usaid-mmc,praekelt/django-usaid-mmc
|
---
+++
@@ -1,2 +1,2 @@
from __future__ import absolute_import
-from usaidmmc.celery import app as celery_app
+from usaidmmc.celery import app as celery_app # flake8: noqa
|
94b55ead63523f7f5677989f1a4999994b205cdf
|
src/runcommands/util/enums.py
|
src/runcommands/util/enums.py
|
import enum
import subprocess
from blessings import Terminal
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return self.value
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
import enum
import os
import subprocess
import sys
from blessings import Terminal
from .misc import isatty
if not (isatty(sys.stdout) and os.getenv("TERM")):
class Terminal:
def __getattr__(self, name):
return ""
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return self.value
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
Check for TTY and TERM when setting up Color enum
|
Check for TTY and TERM when setting up Color enum
Amends 0d27649df30419a79ca063ee3e47073f2ba8330e
|
Python
|
mit
|
wylee/runcommands,wylee/runcommands
|
---
+++
@@ -1,7 +1,18 @@
import enum
+import os
import subprocess
+import sys
from blessings import Terminal
+
+from .misc import isatty
+
+
+if not (isatty(sys.stdout) and os.getenv("TERM")):
+
+ class Terminal:
+ def __getattr__(self, name):
+ return ""
TERM = Terminal()
|
621ca7bebfcc53026d8f98b9f6cfefe6ff25961b
|
src/util/constants.py
|
src/util/constants.py
|
# start of sentence token
SOS = '<S>'
# end of sentence token
EOS = '</S>'
|
# start of sentence token
SOS = chr(2)
# end of sentence token
EOS = chr(3)
|
Use separate characters for SOS and EOS
|
Use separate characters for SOS and EOS
|
Python
|
mit
|
milankinen/c2w2c,milankinen/c2w2c
|
---
+++
@@ -1,5 +1,5 @@
# start of sentence token
-SOS = '<S>'
+SOS = chr(2)
# end of sentence token
-EOS = '</S>'
+EOS = chr(3)
|
e6bfc4eb1d8f5a4d0239232fa89aa9d3d756549c
|
test/geocoders/geonames.py
|
test/geocoders/geonames.py
|
import unittest
from geopy.geocoders import GeoNames
from test.geocoders.util import GeocoderTestBase, env
@unittest.skipUnless( # pylint: disable=R0904,C0111
bool(env.get('GEONAMES_USERNAME')),
"No GEONAMES_USERNAME env variable set"
)
class GeoNamesTestCase(GeocoderTestBase):
@classmethod
def setUpClass(cls):
cls.delta = 0.04
def test_unicode_name(self):
"""
GeoNames.geocode unicode
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
{"query": u"\u6545\u5bab"},
{"latitude": 30.90097, "longitude": 118.49436},
)
def test_reverse(self):
"""
GeoNames.reverse
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.reverse_run(
{"query": u"40.75376406311989, -73.98489005863667"},
{"latitude": 40.75376406311989, "longitude": -73.98489005863667},
)
|
# -*- coding: UTF-8 -*-
import unittest
from geopy.geocoders import GeoNames
from test.geocoders.util import GeocoderTestBase, env
@unittest.skipUnless( # pylint: disable=R0904,C0111
bool(env.get('GEONAMES_USERNAME')),
"No GEONAMES_USERNAME env variable set"
)
class GeoNamesTestCase(GeocoderTestBase):
@classmethod
def setUpClass(cls):
cls.delta = 0.04
def test_unicode_name(self):
"""
GeoNames.geocode unicode
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
{"query": u"Musée du Louvre"},
{"latitude": 48.8610, "longitude": 2.335},
)
def test_reverse(self):
"""
GeoNames.reverse
"""
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.reverse_run(
{"query": u"40.75376406311989, -73.98489005863667"},
{"latitude": 40.75376406311989, "longitude": -73.98489005863667},
)
|
Use different location for GeoNames integration test
|
Use different location for GeoNames integration test
|
Python
|
mit
|
RDXT/geopy,Vimos/geopy,mthh/geopy,memaldi/geopy,ahlusar1989/geopy,jmb/geopy,two9seven/geopy,magnushiie/geopy,ahlusar1989/geopy,mthh/geopy,Vimos/geopy,smileliaohua/geopy,SoftwareArtisan/geopy,magnushiie/geopy,cffk/geopy,cffk/geopy,geopy/geopy,memaldi/geopy,RDXT/geopy,sebastianneubauer/geopy,sebastianneubauer/geopy,smileliaohua/geopy,two9seven/geopy,SoftwareArtisan/geopy
|
---
+++
@@ -1,4 +1,4 @@
-
+# -*- coding: UTF-8 -*-
import unittest
from geopy.geocoders import GeoNames
@@ -22,8 +22,8 @@
# work around ConfigurationError raised in GeoNames init
self.geocoder = GeoNames(username=env['GEONAMES_USERNAME'])
self.geocode_run(
- {"query": u"\u6545\u5bab"},
- {"latitude": 30.90097, "longitude": 118.49436},
+ {"query": u"Musée du Louvre"},
+ {"latitude": 48.8610, "longitude": 2.335},
)
def test_reverse(self):
|
56e5e255b19c0b0d5998628706542d8f9666f58c
|
tests/builtins/test_sum.py
|
tests/builtins/test_sum.py
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class SumTests(TranspileTestCase):
def test_sum_list(self):
self.assertCodeExecution("""
print(sum([1, 2, 3, 4, 5, 6, 7]))
""")
def test_sum_tuple(self):
self.assertCodeExecution("""
print(sum((1, 2, 3, 4, 5, 6, 7)))
""")
def test_sum_iterator(self):
self.assertCodeExecution("""
i = iter([1, 2])
print(sum(i))
print(sum(i))
""")
def test_sum_mix_floats_and_ints(self):
self.assertCodeExecution("""
print(sum([1, 1.414, 2, 3.14159]))
""")
class BuiltinSumFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["sum"]
not_implemented = [
'test_bytearray',
]
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class SumTests(TranspileTestCase):
def test_sum_list(self):
self.assertCodeExecution("""
print(sum([1, 2, 3, 4, 5, 6, 7]))
""")
def test_sum_tuple(self):
self.assertCodeExecution("""
print(sum((1, 2, 3, 4, 5, 6, 7)))
""")
def test_sum_iterator(self):
self.assertCodeExecution("""
i = iter([1, 2])
print(sum(i))
print(sum(i))
""")
def test_sum_mix_floats_and_ints(self):
self.assertCodeExecution("""
print(sum([1, 1.414, 2, 3.14159]))
""")
class BuiltinSumFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["sum"]
not_implemented = [
'test_bytearray',
'test_frozenzet',
]
|
Put ‘test_frozenset’ back into BuiltinSumFunctionTests.not_implemented
|
Put ‘test_frozenset’ back into BuiltinSumFunctionTests.not_implemented
I’m fairly certain that this was accidentally removed by my automatic processing
|
Python
|
bsd-3-clause
|
cflee/voc,freakboy3742/voc,cflee/voc,freakboy3742/voc
|
---
+++
@@ -30,4 +30,5 @@
not_implemented = [
'test_bytearray',
+ 'test_frozenzet',
]
|
abd0f3af8967cb7f261082a4f1ee90d4b5f274ca
|
purefap/core/management/commands/deleteusers.py
|
purefap/core/management/commands/deleteusers.py
|
from django.core.management.base import BaseCommand, CommandError
from purefap.core.models import FTPUser, FTPStaff, FTPClient
import shutil
from datetime import datetime
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--noop',
action='store_true',
dest='noop',
default=False,
help='Just print which users will be deleted'),
make_option('--files',
action='store_true',
dest='files',
default=False,
help='Delete user\'s homedir along with his account')
)
help = 'Delete expired/inactive users'
def handle(self, *args, **options):
for u in FTPClient.objects.all():
if u.expiry_date and u.expiry_date.isocalendar() < datetime.now().isocalendar():
self.stdout.write("User %s will be deleted" % u)
if options ['files']:
self.stdout.write(" - Directory %s and its contents will be deleted" % u.homedir)
if not options['noop']:
if options['files']:
shutil.rmtree(u.homedir)
u.delete()
|
from django.core.management.base import BaseCommand, CommandError
from purefap.core.models import FTPUser, FTPStaff, FTPClient
import shutil
from datetime import datetime
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--noop',
action='store_true',
dest='noop',
default=False,
help='Just print which users will be deleted'),
make_option('--files',
action='store_true',
dest='files',
default=False,
help='Delete user\'s homedir along with his account')
)
help = 'Delete expired/inactive users'
def handle(self, *args, **options):
for u in FTPClient.objects.all():
if u.expiry_date and u.expiry_date.isocalendar() < datetime.now().isocalendar():
self.stdout.write("User %s will be deleted" % u)
if options ['files']:
self.stdout.write(" - Directory %s and its contents will be deleted" % u.homedir)
if not options['noop']:
if options['files']:
shutil.rmtree(u.homedir.path)
u.delete()
|
Fix rmtree call for deleting user's homedirs
|
Fix rmtree call for deleting user's homedirs
|
Python
|
bsd-2-clause
|
fim/purefap
|
---
+++
@@ -27,5 +27,5 @@
self.stdout.write(" - Directory %s and its contents will be deleted" % u.homedir)
if not options['noop']:
if options['files']:
- shutil.rmtree(u.homedir)
+ shutil.rmtree(u.homedir.path)
u.delete()
|
3f180c4251a4217c31d954bae3fd5ffb5b49fbd7
|
build/presubmit_checks.py
|
build/presubmit_checks.py
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
def CheckChangeLogBug(input_api, output_api):
if input_api.change.BUG is None or re.match('\#\d+$', input_api.change.BUG):
return []
err = output_api.PresubmitError(
('Invalid bug "%s". BUG= should either not be present or start with # '
'for a github issue.' % input_api.change.BUG))
return [err]
def RunChecks(input_api, output_api, excluded_paths):
results = []
results += input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=excluded_paths)
results += input_api.canned_checks.RunPylint(
input_api, output_api, black_list=excluded_paths)
results += CheckChangeLogBug(input_api, output_api)
return results
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
def CheckChangeLogBug(input_api, output_api):
if input_api.change.BUG is None or re.match(
'(\#\d+)(,\s*\#\d+)*$', input_api.change.BUG):
return []
err = output_api.PresubmitError(
('Invalid bug "%s". BUG= should either not be present or start with # '
'for a github issue.' % input_api.change.BUG))
return [err]
def RunChecks(input_api, output_api, excluded_paths):
results = []
results += input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=excluded_paths)
results += input_api.canned_checks.RunPylint(
input_api, output_api, black_list=excluded_paths)
results += CheckChangeLogBug(input_api, output_api)
return results
|
Fix multiple bug IDs on presubmit.
|
Fix multiple bug IDs on presubmit.
BUG=#1212
TBR=nduca@chromium.org
Review URL: https://codereview.chromium.org/1282273002
|
Python
|
bsd-3-clause
|
catapult-project/catapult,scottmcmaster/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,danbeam/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult,0x90sled/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,zeptonaut/catapult,danbeam/catapult,sahiljain/catapult,scottmcmaster/catapult,catapult-project/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,SummerLW/Perf-Insight-Report,modulexcite/catapult,zeptonaut/catapult,benschmaus/catapult,scottmcmaster/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,modulexcite/catapult,danbeam/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,sahiljain/catapult,zeptonaut/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,modulexcite/catapult,benschmaus/catapult,0x90sled/catapult,catapult-project/catapult,benschmaus/catapult,danbeam/catapult,0x90sled/catapult
|
---
+++
@@ -5,7 +5,8 @@
def CheckChangeLogBug(input_api, output_api):
- if input_api.change.BUG is None or re.match('\#\d+$', input_api.change.BUG):
+ if input_api.change.BUG is None or re.match(
+ '(\#\d+)(,\s*\#\d+)*$', input_api.change.BUG):
return []
err = output_api.PresubmitError(
('Invalid bug "%s". BUG= should either not be present or start with # '
|
dd6621267957bf621629f6ccb1930f089c7fd3eb
|
Lib/plat-riscos/riscosenviron.py
|
Lib/plat-riscos/riscosenviron.py
|
"""A more or less complete user-defined wrapper around dictionary objects."""
import riscos
class _Environ:
def __init__(self, initial = None):
pass
def __repr__(self):
return repr(riscos.getenvdict())
def __cmp__(self, dict):
if isinstance(dict, UserDict):
return cmp(riscos.getenvdict(), dict)
def __len__(self):
return len(riscos.getenvdict())
def __getitem__(self, key):
ret = riscos.getenv(key)
if ret<>None:
return ret
else:
raise KeyError
def __setitem__(self, key, item):
riscos.setenv(key, item)
def __delitem__(self, key):
riscos.delenv(key)
def clear(self):
# too dangerous on RISC OS
pass
def copy(self):
return riscos.getenvdict()
def keys(self): return riscos.getenvdict().keys()
def items(self): return riscos.getenvdict().items()
def values(self): return riscos.getenvdict().values()
def has_key(self, key):
value = riscos.getenv(key)
return value<>None
def update(self, dict):
for k, v in dict.items():
riscos.putenv(k, v)
def get(self, key, failobj=None):
value = riscos.getenv(key)
if value<>None:
return value
else:
return failobj
|
"""A more or less complete user-defined wrapper around dictionary objects."""
import riscos
class _Environ:
def __init__(self, initial = None):
pass
def __repr__(self):
return repr(riscos.getenvdict())
def __cmp__(self, dict):
if isinstance(dict, UserDict):
return cmp(riscos.getenvdict(), dict)
def __len__(self):
return len(riscos.getenvdict())
def __getitem__(self, key):
ret = riscos.getenv(key)
if ret<>None:
return ret
else:
raise KeyError
def __setitem__(self, key, item):
riscos.putenv(key, item)
def __delitem__(self, key):
riscos.delenv(key)
def clear(self):
# too dangerous on RISC OS
pass
def copy(self):
return riscos.getenvdict()
def keys(self): return riscos.getenvdict().keys()
def items(self): return riscos.getenvdict().items()
def values(self): return riscos.getenvdict().values()
def has_key(self, key):
value = riscos.getenv(key)
return value<>None
def update(self, dict):
for k, v in dict.items():
riscos.putenv(k, v)
def get(self, key, failobj=None):
value = riscos.getenv(key)
if value<>None:
return value
else:
return failobj
|
Replace setenv with putenv. Reported by Dietmar Schwertberger.
|
Replace setenv with putenv. Reported by Dietmar Schwertberger.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
---
+++
@@ -19,7 +19,7 @@
else:
raise KeyError
def __setitem__(self, key, item):
- riscos.setenv(key, item)
+ riscos.putenv(key, item)
def __delitem__(self, key):
riscos.delenv(key)
def clear(self):
|
cf0193adcf6c58d82b577f09842c265bc09a685a
|
candidates/csv_helpers.py
|
candidates/csv_helpers.py
|
import csv
import StringIO
from .models import CSV_ROW_FIELDS
def encode_row_values(d):
return {
k: unicode('' if v is None else v).encode('utf-8')
for k, v in d.items()
}
def list_to_csv(candidates_list):
output = StringIO.StringIO()
writer = csv.DictWriter(
output,
fieldnames=CSV_ROW_FIELDS,
dialect=csv.excel)
writer.writeheader()
for row in candidates_list:
writer.writerow(encode_row_values(row))
return output.getvalue()
|
import csv
import StringIO
from .models import CSV_ROW_FIELDS
def encode_row_values(d):
return {
k: unicode('' if v is None else v).encode('utf-8')
for k, v in d.items()
}
def candidate_sort_key(row):
return (row['constituency'], row['name'].split()[-1])
def list_to_csv(candidates_list):
output = StringIO.StringIO()
writer = csv.DictWriter(
output,
fieldnames=CSV_ROW_FIELDS,
dialect=csv.excel)
writer.writeheader()
for row in sorted(candidates_list, key=candidate_sort_key):
writer.writerow(encode_row_values(row))
return output.getvalue()
|
Sort the rows in CSV output on (constituency, last name)
|
Sort the rows in CSV output on (constituency, last name)
|
Python
|
agpl-3.0
|
datamade/yournextmp-popit,openstate/yournextrepresentative,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,openstate/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit
|
---
+++
@@ -9,6 +9,9 @@
for k, v in d.items()
}
+def candidate_sort_key(row):
+ return (row['constituency'], row['name'].split()[-1])
+
def list_to_csv(candidates_list):
output = StringIO.StringIO()
writer = csv.DictWriter(
@@ -16,6 +19,6 @@
fieldnames=CSV_ROW_FIELDS,
dialect=csv.excel)
writer.writeheader()
- for row in candidates_list:
+ for row in sorted(candidates_list, key=candidate_sort_key):
writer.writerow(encode_row_values(row))
return output.getvalue()
|
f1c09bc9969cf9d66179baef80b5cbb3d28d5596
|
app/report/views.py
|
app/report/views.py
|
from flask import render_template
from app import app
@app.route('/')
def index():
return render_template('index.html')
@app.route('/report/<path:repository>')
def report():
pass
|
from flask import flash, g, redirect, render_template, request, url_for
from app import app
from vcs.repository import is_valid_github_repository, parse_url_and_get_repo
@app.route('/')
def index():
return render_template('index.html')
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/check', methods=['POST'])
def check():
url = request.form['url']
if not is_valid_github_repository(url):
flash('Given repository url is not valid')
return redirect(url_for('index'))
return redirect(url_for('report', repo_url=url))
@app.route('/report/<path:repo_url>', methods=['GET'])
def report(repo_url):
repo = parse_url_and_get_repo(repo_url)
if repo is None:
flash('Given repository does not exists')
return redirect(url_for('index'))
results = {}
# Analysis processing
return render_template('report/results.html', results=results)
|
Create default behaviour for all routers
|
Create default behaviour for all routers
|
Python
|
mit
|
mingrammer/pyreportcard,mingrammer/pyreportcard
|
---
+++
@@ -1,13 +1,33 @@
-from flask import render_template
+from flask import flash, g, redirect, render_template, request, url_for
from app import app
-
+from vcs.repository import is_valid_github_repository, parse_url_and_get_repo
@app.route('/')
def index():
return render_template('index.html')
-@app.route('/report/<path:repository>')
-def report():
- pass
+@app.route('/about')
+def about():
+ return render_template('about.html')
+
+
+@app.route('/check', methods=['POST'])
+def check():
+ url = request.form['url']
+ if not is_valid_github_repository(url):
+ flash('Given repository url is not valid')
+ return redirect(url_for('index'))
+ return redirect(url_for('report', repo_url=url))
+
+
+@app.route('/report/<path:repo_url>', methods=['GET'])
+def report(repo_url):
+ repo = parse_url_and_get_repo(repo_url)
+ if repo is None:
+ flash('Given repository does not exists')
+ return redirect(url_for('index'))
+ results = {}
+ # Analysis processing
+ return render_template('report/results.html', results=results)
|
3c02b3d104f3a43b019fec9b4168558562ad366c
|
cfp/migrations/0029_auto_20150228_0428.py
|
cfp/migrations/0029_auto_20150228_0428.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db import transaction
@transaction.atomic
def create_topics(apps, schema_editor):
Topic = apps.get_model("cfp", "Topic")
Conference = apps.get_model("cfp", "Conference")
for conf in Conference.objects.exclude(programming_language=''):
topic = Topic.object.get_or_create(value=conf.programming_language)
conf.topics.add(topic)
class Migration(migrations.Migration):
dependencies = [
('cfp', '0028_auto_20150228_0428'),
]
operations = [
migrations.RunPython(create_topics)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db import transaction
@transaction.atomic
def create_topics(apps, schema_editor):
Topic = apps.get_model("cfp", "Topic")
Conference = apps.get_model("cfp", "Conference")
for conf in Conference.objects.exclude(programming_language=''):
topic = Topic.objects.get_or_create(value=conf.programming_language)
conf.topics.add(topic)
class Migration(migrations.Migration):
dependencies = [
('cfp', '0028_auto_20150228_0428'),
]
operations = [
migrations.RunPython(create_topics)
]
|
Fix migration that never actually worked
|
Fix migration that never actually worked
|
Python
|
mit
|
kyleconroy/speakers,kyleconroy/speakers,kyleconroy/speakers
|
---
+++
@@ -11,7 +11,7 @@
Conference = apps.get_model("cfp", "Conference")
for conf in Conference.objects.exclude(programming_language=''):
- topic = Topic.object.get_or_create(value=conf.programming_language)
+ topic = Topic.objects.get_or_create(value=conf.programming_language)
conf.topics.add(topic)
|
703fb96d207d71bc2061f796ceabb7ffaccca34e
|
dotter/__main__.py
|
dotter/__main__.py
|
import argparse
import logging
import os
from .client import GithubCachedClient
from .search import get_dotfiles, SEARCH_QUERIES
def parse_args():
parser = argparse.ArgumentParser(description='Search github for common lines in dotfiles')
parser.add_argument('-t', '--token-file', type=os.path.abspath,
help='path to file containing Github token')
parser.add_argument('-c', '--cache-path', type=os.path.abspath,
help='path to cache directory')
return parser.parse_args()
def main():
args = parse_args()
token = open(args.token_file).read().strip()
client = GithubCachedClient(cache_path=args.cache_path, token=token)
dots = get_dotfiles(client, queries=SEARCH_QUERIES)
return dots
if __name__ == '__main__':
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
dots = main()
for ftype in dots:
print
print ftype
print '-' * 40
print "\n".join("%s\t%d" % i for i in dots[ftype].top_lines())
|
import argparse
import logging
import os
from .client import GithubCachedClient, GithubClient
from .search import get_dotfiles, SEARCH_QUERIES
def parse_args():
parser = argparse.ArgumentParser(description='Search github for common lines in dotfiles')
parser.add_argument('-t', '--token-file', type=os.path.abspath,
help='path to file containing Github token')
parser.add_argument('-c', '--cache-path', type=os.path.abspath,
help='path to cache directory')
return parser.parse_args()
def main():
args = parse_args()
token = open(args.token_file).read().strip() if args.token_file else None
if args.cache_path:
client = GithubCachedClient(cache_path=args.cache_path, token=token)
else:
client = GithubClient(token=token)
dots = get_dotfiles(client, queries=SEARCH_QUERIES)
return dots
if __name__ == '__main__':
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
dots = main()
for ftype in dots:
print
print ftype
print '-' * 40
print "\n".join("%s\t%d" % i for i in dots[ftype].top_lines())
|
Make token-file and cache-path optional
|
Make token-file and cache-path optional
|
Python
|
mit
|
allait/dotter
|
---
+++
@@ -2,7 +2,7 @@
import logging
import os
-from .client import GithubCachedClient
+from .client import GithubCachedClient, GithubClient
from .search import get_dotfiles, SEARCH_QUERIES
@@ -18,8 +18,12 @@
def main():
args = parse_args()
- token = open(args.token_file).read().strip()
- client = GithubCachedClient(cache_path=args.cache_path, token=token)
+ token = open(args.token_file).read().strip() if args.token_file else None
+
+ if args.cache_path:
+ client = GithubCachedClient(cache_path=args.cache_path, token=token)
+ else:
+ client = GithubClient(token=token)
dots = get_dotfiles(client, queries=SEARCH_QUERIES)
|
40b78b23072841cb7926d06a9d37f5a7cdd817ab
|
erpnext_ebay/tasks.py
|
erpnext_ebay/tasks.py
|
# -*- coding: utf-8 -*-
"""Scheduled tasks to be run by erpnext_ebay"""
from frappe.utils.background_jobs import enqueue
def all():
pass
def hourly():
enqueue('erpnext_ebay.sync_orders.sync',
queue='long', job_name='Sync eBay Orders')
def daily():
enqueue('erpnext_ebay.ebay_active_listings.update_ebay_data',
queue='long', job_name='Update eBay Data')
enqueue('erpnext_ebay.ebay_categories.category_sync',
queue='long', job_name='eBay Category Sync')
def weekly():
pass
def monthly():
pass
|
# -*- coding: utf-8 -*-
"""Scheduled tasks to be run by erpnext_ebay"""
from frappe.utils.background_jobs import enqueue
def all():
pass
def hourly():
enqueue('erpnext_ebay.sync_orders.sync',
queue='long', job_name='Sync eBay Orders')
def daily():
enqueue('erpnext_ebay.ebay_active_listings.update_ebay_data',
queue='long', job_name='Update eBay Data',
multiple_error_sites=['UK'])
enqueue('erpnext_ebay.ebay_categories.category_sync',
queue='long', job_name='eBay Category Sync')
def weekly():
pass
def monthly():
pass
|
Add multiple_error_sites for daily eBay update
|
fix: Add multiple_error_sites for daily eBay update
|
Python
|
mit
|
bglazier/erpnext_ebay,bglazier/erpnext_ebay
|
---
+++
@@ -16,7 +16,8 @@
def daily():
enqueue('erpnext_ebay.ebay_active_listings.update_ebay_data',
- queue='long', job_name='Update eBay Data')
+ queue='long', job_name='Update eBay Data',
+ multiple_error_sites=['UK'])
enqueue('erpnext_ebay.ebay_categories.category_sync',
queue='long', job_name='eBay Category Sync')
|
3fe40e91f70e8256d7c86c46f866e82e3ccf26e2
|
commandment/profiles/cert.py
|
commandment/profiles/cert.py
|
'''
Copyright (c) 2015 Jesse Peterson
Licensed under the MIT license. See the included LICENSE.txt file for details.
'''
from . import Payload
import plistlib # needed for Data() wrapper
class PEMCertificatePayload(Payload):
'''PEM-encoded certificate without private key. May contain root
certificates.
Payload type of "com.apple.security.pem". Further encodes cert_data as
plistlib.Data instance (Base64 data).'''
payload_type = 'com.apple.security.pem'
def __init__(self, identifier, cert_data, uuid=None, **kwargs):
Payload.__init__(self, self.payload_type, identifier, uuid, **kwargs)
self.payload['PayloadContent'] = plistlib.Data(cert_data)
class PKCS12CertificatePayload(Payload):
'''Password-protected identity certificate. Only one certificate may be
included.
Payload type of "com.apple.security.pkcs12". Include a PKCS#12 (.p12)
identity as cert_data. Further encodes cert_data as plistlib.Data instance
(Base64 data). Include a password argument for the PKCS#12 identity.'''
payload_type = 'com.apple.security.pkcs12'
def __init__(self, identifier, cert_data, password=None, uuid=None, **kwargs):
Payload.__init__(self, self.payload_type, identifier, uuid, **kwargs)
self.payload['PayloadContent'] = plistlib.Data(cert_data)
if password:
self.payload['Password'] = password
|
'''
Copyright (c) 2015 Jesse Peterson
Licensed under the MIT license. See the included LICENSE.txt file for details.
'''
from . import Payload
import plistlib # needed for Data() wrapper
class PEMCertificatePayload(Payload):
'''PEM-encoded certificate without private key. May contain root
certificates.
Payload type of "com.apple.security.pem". Further encodes cert_data as
plistlib.Data instance (Base64 data).'''
payload_type = 'com.apple.security.pem'
def __init__(self, identifier, cert_data, uuid=None, **kwargs):
kwargs['PayloadContent'] = plistlib.Data(cert_data)
Payload.__init__(self, self.payload_type, identifier, uuid, **kwargs)
class PKCS12CertificatePayload(Payload):
'''Password-protected identity certificate. Only one certificate may be
included.
Payload type of "com.apple.security.pkcs12". Include a PKCS#12 (.p12)
identity as cert_data. Further encodes cert_data as plistlib.Data instance
(Base64 data). Include a password argument for the PKCS#12 identity.'''
payload_type = 'com.apple.security.pkcs12'
def __init__(self, identifier, cert_data, password=None, uuid=None, **kwargs):
kwargs['PayloadContent'] = plistlib.Data(cert_data)
if password:
kwargs['Password'] = password
Payload.__init__(self, self.payload_type, identifier, uuid, **kwargs)
|
Change style of Payload suclasses to better encapsulate internal structure
|
Change style of Payload suclasses to better encapsulate internal structure
|
Python
|
mit
|
mosen/commandment,jessepeterson/commandment,mosen/commandment,mosen/commandment,mosen/commandment,jessepeterson/commandment,mosen/commandment
|
---
+++
@@ -16,9 +16,8 @@
payload_type = 'com.apple.security.pem'
def __init__(self, identifier, cert_data, uuid=None, **kwargs):
+ kwargs['PayloadContent'] = plistlib.Data(cert_data)
Payload.__init__(self, self.payload_type, identifier, uuid, **kwargs)
-
- self.payload['PayloadContent'] = plistlib.Data(cert_data)
class PKCS12CertificatePayload(Payload):
'''Password-protected identity certificate. Only one certificate may be
@@ -31,9 +30,7 @@
payload_type = 'com.apple.security.pkcs12'
def __init__(self, identifier, cert_data, password=None, uuid=None, **kwargs):
+ kwargs['PayloadContent'] = plistlib.Data(cert_data)
+ if password:
+ kwargs['Password'] = password
Payload.__init__(self, self.payload_type, identifier, uuid, **kwargs)
-
- self.payload['PayloadContent'] = plistlib.Data(cert_data)
-
- if password:
- self.payload['Password'] = password
|
20211a9494cc4ecd3f50bf1280d034da8f0cda50
|
comics/accounts/models.py
|
comics/accounts/models.py
|
import uuid
from django.contrib.auth.models import User
from django.db import models
from django.dispatch import receiver
@receiver(models.signals.post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
class UserProfile(models.Model):
user = models.OneToOneField(User)
secret_key = models.CharField(max_length=32, blank=False,
help_text='Secret key for feed and API access')
class Meta:
db_table = 'comics_user_profile'
def __init__(self, *args, **kwargs):
super(UserProfile, self).__init__(*args, **kwargs)
if self.secret_key is None:
self.generate_new_secret_key()
def __unicode__(self):
return u'User profile for %s' % self.user
def generate_new_secret_key(self):
self.secret_key = uuid.uuid4().hex
|
import uuid
from django.contrib.auth.models import User
from django.db import models
from django.dispatch import receiver
@receiver(models.signals.post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
def make_secret_key():
return uuid.uuid4().hex
class UserProfile(models.Model):
user = models.OneToOneField(User)
secret_key = models.CharField(max_length=32, blank=False,
default=make_secret_key,
help_text='Secret key for feed and API access')
class Meta:
db_table = 'comics_user_profile'
def __unicode__(self):
return u'User profile for %s' % self.user
def generate_new_secret_key(self):
self.secret_key = make_secret_key()
|
Fix secret key generation for superuser created with mangage.py
|
Fix secret key generation for superuser created with mangage.py
|
Python
|
agpl-3.0
|
datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics
|
---
+++
@@ -11,21 +11,21 @@
UserProfile.objects.create(user=instance)
+def make_secret_key():
+ return uuid.uuid4().hex
+
+
class UserProfile(models.Model):
user = models.OneToOneField(User)
secret_key = models.CharField(max_length=32, blank=False,
+ default=make_secret_key,
help_text='Secret key for feed and API access')
class Meta:
db_table = 'comics_user_profile'
- def __init__(self, *args, **kwargs):
- super(UserProfile, self).__init__(*args, **kwargs)
- if self.secret_key is None:
- self.generate_new_secret_key()
-
def __unicode__(self):
return u'User profile for %s' % self.user
def generate_new_secret_key(self):
- self.secret_key = uuid.uuid4().hex
+ self.secret_key = make_secret_key()
|
21b884876ad211851b0c8954a1cc3e4b42cae11e
|
test_chatbot_brain.py
|
test_chatbot_brain.py
|
import chatbot_brain
import input_filters
def test_initialize_bot():
bot = chatbot_brain.Chatbot()
assert len(bot.tri_lexicon) == 0
assert len(bot.bi_lexicon) == 0
def test_fill_lexicon():
bot = chatbot_brain.Chatbot()
bot.fill_lexicon()
assert len(bot.tri_lexicon) > 0
assert len(bot.bi_lexicon) > 0
def test_compose_response():
bot = chatbot_brain.Chatbot()
output = bot.compose_response(input_sent="How are you doing?")
assert "," not in output[0]
for sentence in output:
assert "." not in sentence[:-1]
|
import chatbot_brain
def test_initialize_bot():
bot = chatbot_brain.Chatbot()
assert len(bot.tri_lexicon) == 0
assert len(bot.bi_lexicon) == 0
def test_fill_lexicon():
bot = chatbot_brain.Chatbot()
bot.fill_lexicon()
assert len(bot.tri_lexicon) > 0
assert len(bot.bi_lexicon) > 0
def test_compose_response():
bot = chatbot_brain.Chatbot()
output = bot.compose_response(input_sent="How are you doing?")
assert "," not in output[0]
for sentence in output:
assert "." not in sentence[:-1]
def test_i_filter_random_empty_words():
u"""Assert the returned word is in the lexicon and is not a stop char."""
bot = chatbot_brain.Chatbot()
words = [""]
assert bot.i_filter_random(words) == u"What a funny thing to say!"
# untested methods:
# i_filter_random
# o_filter_random
# _create_chains
# _pair_seed
# _chain_filters
# _filter_recursive
|
Add test_i_filter_random_empty_words() to assert that a list that contains an empty string will return the stock phrase
|
Add test_i_filter_random_empty_words() to assert that a list that contains an empty string will return the stock phrase
|
Python
|
mit
|
corinnelhh/chatbot,corinnelhh/chatbot
|
---
+++
@@ -1,5 +1,4 @@
import chatbot_brain
-import input_filters
def test_initialize_bot():
@@ -21,3 +20,18 @@
assert "," not in output[0]
for sentence in output:
assert "." not in sentence[:-1]
+
+
+def test_i_filter_random_empty_words():
+ u"""Assert the returned word is in the lexicon and is not a stop char."""
+ bot = chatbot_brain.Chatbot()
+ words = [""]
+ assert bot.i_filter_random(words) == u"What a funny thing to say!"
+
+# untested methods:
+# i_filter_random
+# o_filter_random
+# _create_chains
+# _pair_seed
+# _chain_filters
+# _filter_recursive
|
3414f24398c3336f5dae8d18035f703db24e492a
|
ynr/apps/elections/urls.py
|
ynr/apps/elections/urls.py
|
from django.conf.urls import url
from elections import views
from elections.helpers import ElectionIDSwitcher
urlpatterns = [
url(
"elections/$",
views.ElectionListView.as_view(),
name="election_list_view",
),
url(
"elections/(?P<election>[^/]+)/$",
ElectionIDSwitcher(
election_view=views.ElectionView, ballot_view=views.BallotPaperView
),
name="election_view",
),
url(
r"^elections/(?P<election>[^/]+)/unlocked/",
views.UnlockedBallotsForElectionListView.as_view(),
name="constituencies-unlocked",
),
url(
r"^election/(?P<ballot_id>[^/]+)/lock/",
views.LockBallotView.as_view(),
name="constituency-lock",
),
url(
r"^elections/(?P<ballot_id>[^/]+).csv",
views.BallotPaperCSVView.as_view(),
name="ballot_paper_csv",
),
]
|
from django.conf.urls import url
from elections import views
from elections.helpers import ElectionIDSwitcher
urlpatterns = [
url(
"^elections/$",
views.ElectionListView.as_view(),
name="election_list_view",
),
url(
"^elections/(?P<election>[^/]+)/$",
ElectionIDSwitcher(
election_view=views.ElectionView, ballot_view=views.BallotPaperView
),
name="election_view",
),
url(
r"^elections/(?P<election>[^/]+)/unlocked/",
views.UnlockedBallotsForElectionListView.as_view(),
name="constituencies-unlocked",
),
url(
r"^election/(?P<ballot_id>[^/]+)/lock/",
views.LockBallotView.as_view(),
name="constituency-lock",
),
url(
r"^elections/(?P<ballot_id>[^/]+).csv",
views.BallotPaperCSVView.as_view(),
name="ballot_paper_csv",
),
]
|
Make elections URLs match less
|
Make elections URLs match less
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
---
+++
@@ -5,12 +5,12 @@
urlpatterns = [
url(
- "elections/$",
+ "^elections/$",
views.ElectionListView.as_view(),
name="election_list_view",
),
url(
- "elections/(?P<election>[^/]+)/$",
+ "^elections/(?P<election>[^/]+)/$",
ElectionIDSwitcher(
election_view=views.ElectionView, ballot_view=views.BallotPaperView
),
|
e60649e08ce6b1b01d1480bc06433007c9c320ee
|
zun/tests/tempest/utils.py
|
zun/tests/tempest/utils.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
def wait_for_condition(condition, interval=1, timeout=60):
start_time = time.time()
end_time = time.time() + timeout
while time.time() < end_time:
result = condition()
if result:
return result
time.sleep(interval)
raise Exception(("Timed out after %s seconds. Started on %s and ended "
"on %s") % (timeout, start_time, end_time))
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
def wait_for_condition(condition, interval=2, timeout=60):
start_time = time.time()
end_time = time.time() + timeout
while time.time() < end_time:
result = condition()
if result:
return result
time.sleep(interval)
raise Exception(("Timed out after %s seconds. Started on %s and ended "
"on %s") % (timeout, start_time, end_time))
|
Change the tempest test interval to 2s.
|
Change the tempest test interval to 2s.
Always seen tempest test fail with api timeout error. Refer to
http://logs.openstack.org/42/469342/2/check/
gate-zun-devstack-dsvm-docker-sql/02b4650/
logs/apache/zun_api.txt.gz#_2017-05-31_08_29_52_459
But run the tempest in local devstack, the logs show there are
lots of rabbitmq messages and seems the rabbitmq cannot handle
so much message. Checked the code there are lots of check status
called in tempest test.
So this patch propose to change the check status interval to 2s
to reduce the numbers of messages to avoid rabbitmq message stuck.
Change-Id: Ibb41ae552bcce0f685834e13a6c3a5836f657cab
|
Python
|
apache-2.0
|
kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun
|
---
+++
@@ -13,7 +13,7 @@
import time
-def wait_for_condition(condition, interval=1, timeout=60):
+def wait_for_condition(condition, interval=2, timeout=60):
start_time = time.time()
end_time = time.time() + timeout
while time.time() < end_time:
|
fa6902b75b9eb274e2dd410e3702d77fed018050
|
bot/api/telegram.py
|
bot/api/telegram.py
|
import requests
from bot.api.domain import ApiObject
class TelegramBotApi:
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = requests.get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = requests.get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
Mark TelegramBotApi as thread-safe. Also remove unused import.
|
Mark TelegramBotApi as thread-safe. Also remove unused import.
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
---
+++
@@ -1,9 +1,9 @@
import requests
-
-from bot.api.domain import ApiObject
class TelegramBotApi:
+ """This is a threading-safe API. Avoid breaking it by adding state."""
+
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
|
bc7b52e9f2095291f9277e3c9cbac9c191fa61a5
|
cherrypy/py3util.py
|
cherrypy/py3util.py
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
Use builtin sorted, reversed if available.
|
Use builtin sorted, reversed if available.
--HG--
extra : convert_revision : svn%3Ae1d34091-3ce9-0310-8e96-997e60db3bd5/trunk%402444
|
Python
|
bsd-3-clause
|
cherrypy/magicbus
|
---
+++
@@ -3,11 +3,17 @@
"""
import sys
-def sorted(lst):
- newlst = list(lst)
- newlst.sort()
- return newlst
+try:
+ sorted = sorted
+except NameError:
+ def sorted(lst):
+ newlst = list(lst)
+ newlst.sort()
+ return newlst
-def reversed(lst):
- newlst = list(lst)
- return iter(newlst[::-1])
+try:
+ reversed = reversed
+except NameError:
+ def reversed(lst):
+ newlst = list(lst)
+ return iter(newlst[::-1])
|
20370bf79b43dc566a6a7e85a903275d80e437a2
|
api/projects/signals.py
|
api/projects/signals.py
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from projects.models import ExperimentGroup
from projects.tasks import start_group_experiments
from experiments.models import Experiment
@receiver(post_save, sender=ExperimentGroup, dispatch_uid="experiment_group_saved")
def new_experiment_group(sender, **kwargs):
instance = kwargs['instance']
created = kwargs.get('created', False)
if not created:
return
# Parse polyaxonfile content and create the experiments
specification = instance.specification
for xp in range(specification.matrix_space):
Experiment.objects.create(project=instance.project,
user=instance.user,
experiment_group=instance,
config=specification.parsed_data[xp])
start_group_experiments.delay(instance.id)
|
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from projects.models import ExperimentGroup
from projects.tasks import start_group_experiments
from experiments.models import Experiment
from spawner import scheduler
@receiver(post_save, sender=ExperimentGroup, dispatch_uid="experiment_group_saved")
def new_experiment_group(sender, **kwargs):
instance = kwargs['instance']
created = kwargs.get('created', False)
if not created:
return
# Parse polyaxonfile content and create the experiments
specification = instance.specification
for xp in range(specification.matrix_space):
Experiment.objects.create(project=instance.project,
user=instance.user,
experiment_group=instance,
config=specification.parsed_data[xp])
start_group_experiments.delay(instance.id)
@receiver(pre_save, sender=ExperimentGroup, dispatch_uid="experiment_group_deleted")
def experiment_group_deleted(sender, **kwargs):
"""Stop all experiments before deleting the group."""
instance = kwargs['instance']
for experiment in instance.running_experiments:
scheduler.schedule_stop_experiment(experiment, is_delete=True)
|
Stop experiments before deleting group
|
Stop experiments before deleting group
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
---
+++
@@ -1,9 +1,10 @@
-from django.db.models.signals import post_save
+from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from projects.models import ExperimentGroup
from projects.tasks import start_group_experiments
from experiments.models import Experiment
+from spawner import scheduler
@receiver(post_save, sender=ExperimentGroup, dispatch_uid="experiment_group_saved")
@@ -25,3 +26,11 @@
config=specification.parsed_data[xp])
start_group_experiments.delay(instance.id)
+
+
+@receiver(pre_save, sender=ExperimentGroup, dispatch_uid="experiment_group_deleted")
+def experiment_group_deleted(sender, **kwargs):
+ """Stop all experiments before deleting the group."""
+ instance = kwargs['instance']
+ for experiment in instance.running_experiments:
+ scheduler.schedule_stop_experiment(experiment, is_delete=True)
|
0eafac86c679689c77e371150c173c351d0aa926
|
appex_dump.py
|
appex_dump.py
|
# coding: utf-8
# See: https://forum.omz-software.com/topic/2358/appex-safari-content
import appex
def main():
if appex.is_running_extension():
for func in (appex.get_attachments, appex.get_file_path,
appex.get_file_paths, appex.get_image, appex.get_images,
appex.get_text, appex.get_url, appex.get_urls,
appex.get_vcard, appex.get_vcards):
print('{:<11} : {}'.format(func.func_name.partition('_')[2], func()))
if __name__ == '__main__':
main()
|
# coding: utf-8
# See: https://forum.omz-software.com/topic/2358/appex-safari-content
import appex, inspect
def main():
if appex.is_running_extension():
for name_func in inspect.getmembers(appex):
name, func = name_func
if name.startswith('get_'): # find all appex.get_xxx() methods
print('{:<11} : {}'.format(name.partition('_')[2], func()))
if __name__ == '__main__':
main()
|
Use inspect to remove hardcoding of method names
|
Use inspect to remove hardcoding of method names
|
Python
|
apache-2.0
|
cclauss/Ten-lines-or-less
|
---
+++
@@ -2,15 +2,14 @@
# See: https://forum.omz-software.com/topic/2358/appex-safari-content
-import appex
+import appex, inspect
def main():
if appex.is_running_extension():
- for func in (appex.get_attachments, appex.get_file_path,
- appex.get_file_paths, appex.get_image, appex.get_images,
- appex.get_text, appex.get_url, appex.get_urls,
- appex.get_vcard, appex.get_vcards):
- print('{:<11} : {}'.format(func.func_name.partition('_')[2], func()))
+ for name_func in inspect.getmembers(appex):
+ name, func = name_func
+ if name.startswith('get_'): # find all appex.get_xxx() methods
+ print('{:<11} : {}'.format(name.partition('_')[2], func()))
if __name__ == '__main__':
main()
|
42536943591ef77df3fc453e6e0b456e7a2bed89
|
cupy/array_api/_typing.py
|
cupy/array_api/_typing.py
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from ._array_object import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
from cupy.cuda import Device as _Device
from __future__ import annotations
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import (
Any,
Literal,
Sequence,
Type,
Union,
TYPE_CHECKING,
TypeVar,
Protocol,
)
from ._array_object import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
_T_co = TypeVar("_T_co", covariant=True)
class NestedSequence(Protocol[_T_co]):
def __getitem__(self, key: int, /) -> _T_co | NestedSequence[_T_co]: ...
def __len__(self, /) -> int: ...
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Replace `NestedSequence` with a proper nested sequence protocol
|
ENH: Replace `NestedSequence` with a proper nested sequence protocol
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
---
+++
@@ -9,6 +9,8 @@
from cupy.cuda import Device as _Device
+from __future__ import annotations
+
__all__ = [
"Array",
"Device",
@@ -19,7 +21,16 @@
]
import sys
-from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
+from typing import (
+ Any,
+ Literal,
+ Sequence,
+ Type,
+ Union,
+ TYPE_CHECKING,
+ TypeVar,
+ Protocol,
+)
from ._array_object import Array
from numpy import (
@@ -36,10 +47,11 @@
float64,
)
-# This should really be recursive, but that isn't supported yet. See the
-# similar comment in numpy/typing/_array_like.py
-_T = TypeVar("_T")
-NestedSequence = Sequence[Sequence[_T]]
+_T_co = TypeVar("_T_co", covariant=True)
+
+class NestedSequence(Protocol[_T_co]):
+ def __getitem__(self, key: int, /) -> _T_co | NestedSequence[_T_co]: ...
+ def __len__(self, /) -> int: ...
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
|
79488513dfedb27a627a1eb516fb2fb2b6a2900c
|
geotrek/settings/env_tests.py
|
geotrek/settings/env_tests.py
|
#
# Django Tests
# ..........................
TEST = True
CELERY_ALWAYS_EAGER = True
TEST_EXCLUDE = ('django',)
INSTALLED_APPS += (
'geotrek.diving',
'geotrek.sensitivity',
'geotrek.outdoor',
)
LOGGING['handlers']['console']['level'] = 'CRITICAL'
LANGUAGE_CODE = 'en'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'en'
MODELTRANSLATION_LANGUAGES = ('en', 'es', 'fr', 'it')
LAND_BBOX_AREAS_ENABLED = True
class DisableMigrations():
def __contains__(self, item):
return True
def __getitem__(self, item):
return None
MIGRATION_MODULES = DisableMigrations()
ADMINS = (
('test', 'test@test.com'),
)
MANAGERS = ADMINS
TEST_RUNNER = 'geotrek.test_runner.TestRunner'
|
#
# Django Tests
# ..........................
TEST = True
CELERY_ALWAYS_EAGER = True
TEST_EXCLUDE = ('django',)
INSTALLED_APPS += (
'geotrek.diving',
'geotrek.sensitivity',
'geotrek.outdoor',
'drf_yasg',
)
LOGGING['handlers']['console']['level'] = 'CRITICAL'
LANGUAGE_CODE = 'en'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'en'
MODELTRANSLATION_LANGUAGES = ('en', 'es', 'fr', 'it')
LAND_BBOX_AREAS_ENABLED = True
class DisableMigrations():
def __contains__(self, item):
return True
def __getitem__(self, item):
return None
MIGRATION_MODULES = DisableMigrations()
ADMINS = (
('test', 'test@test.com'),
)
MANAGERS = ADMINS
TEST_RUNNER = 'geotrek.test_runner.TestRunner'
|
Enable drf_yasg in test settings
|
Enable drf_yasg in test settings
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
---
+++
@@ -12,6 +12,7 @@
'geotrek.diving',
'geotrek.sensitivity',
'geotrek.outdoor',
+ 'drf_yasg',
)
LOGGING['handlers']['console']['level'] = 'CRITICAL'
|
4a6ef7b593786f409c72f192c50e16e40082c8de
|
apps/dashboards/urls.py
|
apps/dashboards/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic.simple import redirect_to
urlpatterns = patterns('dashboards.views',
url(r'^dashboards/revisions$', 'revisions', name='dashboards.revisions'),
url(r'^dashboards/user_lookup$', 'user_lookup',
name='dashboards.user_lookup'),
url(r'^dashboards/topic_lookup$', 'topic_lookup',
name='dashboards.topic_lookup'),
url(r'^dashboards/localization$', redirect_to,
{'url': '/docs/MDN/Doc_status/Overview'}),
)
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
urlpatterns = patterns('dashboards.views',
url(r'^dashboards/revisions$', 'revisions', name='dashboards.revisions'),
url(r'^dashboards/user_lookup$', 'user_lookup',
name='dashboards.user_lookup'),
url(r'^dashboards/topic_lookup$', 'topic_lookup',
name='dashboards.topic_lookup'),
url(r'^dashboards/localization$',
RedirectView.as_view(url='/docs/MDN/Doc_status/Overview')),
)
|
Hide DeprecationWarning for old function based generic views.
|
Hide DeprecationWarning for old function based generic views.
|
Python
|
mpl-2.0
|
jwhitlock/kuma,ollie314/kuma,chirilo/kuma,varunkamra/kuma,biswajitsahu/kuma,robhudson/kuma,openjck/kuma,RanadeepPolavarapu/kuma,scrollback/kuma,davehunt/kuma,a2sheppy/kuma,scrollback/kuma,davidyezsetz/kuma,robhudson/kuma,mozilla/kuma,davidyezsetz/kuma,nhenezi/kuma,MenZil/kuma,jezdez/kuma,RanadeepPolavarapu/kuma,escattone/kuma,jwhitlock/kuma,bluemini/kuma,utkbansal/kuma,Elchi3/kuma,biswajitsahu/kuma,mastizada/kuma,a2sheppy/kuma,SphinxKnight/kuma,darkwing/kuma,YOTOV-LIMITED/kuma,YOTOV-LIMITED/kuma,jezdez/kuma,ollie314/kuma,mozilla/kuma,jgmize/kuma,openjck/kuma,surajssd/kuma,tximikel/kuma,cindyyu/kuma,a2sheppy/kuma,davidyezsetz/kuma,whip112/Whip112,safwanrahman/kuma,varunkamra/kuma,utkbansal/kuma,jezdez/kuma,davehunt/kuma,scrollback/kuma,a2sheppy/kuma,carnell69/kuma,a2sheppy/kuma,whip112/Whip112,tximikel/kuma,whip112/Whip112,scrollback/kuma,jgmize/kuma,openjck/kuma,surajssd/kuma,biswajitsahu/kuma,ronakkhunt/kuma,utkbansal/kuma,carnell69/kuma,darkwing/kuma,SphinxKnight/kuma,openjck/kuma,anaran/kuma,bluemini/kuma,ollie314/kuma,robhudson/kuma,safwanrahman/kuma,whip112/Whip112,darkwing/kuma,robhudson/kuma,Elchi3/kuma,SphinxKnight/kuma,bluemini/kuma,darkwing/kuma,darkwing/kuma,escattone/kuma,ronakkhunt/kuma,hoosteeno/kuma,tximikel/kuma,jezdez/kuma,surajssd/kuma,jwhitlock/kuma,groovecoder/kuma,varunkamra/kuma,yfdyh000/kuma,ollie314/kuma,chirilo/kuma,FrankBian/kuma,RanadeepPolavarapu/kuma,tximikel/kuma,groovecoder/kuma,SphinxKnight/kuma,FrankBian/kuma,surajssd/kuma,davidyezsetz/kuma,utkbansal/kuma,jezdez/kuma,escattone/kuma,SphinxKnight/kuma,yfdyh000/kuma,YOTOV-LIMITED/kuma,safwanrahman/kuma,chirilo/kuma,scrollback/kuma,FrankBian/kuma,davehunt/kuma,jgmize/kuma,tximikel/kuma,nhenezi/kuma,bluemini/kuma,ollie314/kuma,FrankBian/kuma,ronakkhunt/kuma,Elchi3/kuma,biswajitsahu/kuma,cindyyu/kuma,nhenezi/kuma,jezdez/kuma,MenZil/kuma,ronakkhunt/kuma,hoosteeno/kuma,safwanrahman/kuma,mozilla/kuma,groovecoder/kuma,whip112/Whip112,openjck/kuma,hoosteeno/kuma,cindyyu/kuma,FrankBian/kuma,jgmi
ze/kuma,MenZil/kuma,hoosteeno/kuma,jwhitlock/kuma,davehunt/kuma,carnell69/kuma,yfdyh000/kuma,MenZil/kuma,davidyezsetz/kuma,YOTOV-LIMITED/kuma,groovecoder/kuma,anaran/kuma,chirilo/kuma,YOTOV-LIMITED/kuma,cindyyu/kuma,mastizada/kuma,mastizada/kuma,yfdyh000/kuma,hoosteeno/kuma,davehunt/kuma,bluemini/kuma,mozilla/kuma,groovecoder/kuma,yfdyh000/kuma,MenZil/kuma,varunkamra/kuma,cindyyu/kuma,groovecoder/kuma,carnell69/kuma,mastizada/kuma,anaran/kuma,safwanrahman/kuma,chirilo/kuma,anaran/kuma,jwhitlock/kuma,utkbansal/kuma,chirilo/kuma,utkbansal/kuma,jgmize/kuma,anaran/kuma,nhenezi/kuma,yfdyh000/kuma,varunkamra/kuma,davehunt/kuma,bluemini/kuma,nhenezi/kuma,hoosteeno/kuma,mozilla/kuma,RanadeepPolavarapu/kuma,surajssd/kuma,RanadeepPolavarapu/kuma,surajssd/kuma,jgmize/kuma,anaran/kuma,ronakkhunt/kuma,Elchi3/kuma,carnell69/kuma,Elchi3/kuma,biswajitsahu/kuma,robhudson/kuma,ronakkhunt/kuma,tximikel/kuma,carnell69/kuma,robhudson/kuma,openjck/kuma,RanadeepPolavarapu/kuma,whip112/Whip112,safwanrahman/kuma,ollie314/kuma,darkwing/kuma,varunkamra/kuma,MenZil/kuma,YOTOV-LIMITED/kuma,cindyyu/kuma,SphinxKnight/kuma,biswajitsahu/kuma
|
---
+++
@@ -1,5 +1,5 @@
from django.conf.urls import patterns, url
-from django.views.generic.simple import redirect_to
+from django.views.generic.base import RedirectView
urlpatterns = patterns('dashboards.views',
@@ -9,6 +9,6 @@
url(r'^dashboards/topic_lookup$', 'topic_lookup',
name='dashboards.topic_lookup'),
- url(r'^dashboards/localization$', redirect_to,
- {'url': '/docs/MDN/Doc_status/Overview'}),
+ url(r'^dashboards/localization$',
+ RedirectView.as_view(url='/docs/MDN/Doc_status/Overview')),
)
|
1a10f21566f59c9f4f8171bc088af1e2a18d9702
|
prestoadmin/_version.py
|
prestoadmin/_version.py
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.3'
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.4-SNAPSHOT'
|
Prepare for the next development iteration
|
Prepare for the next development iteration
|
Python
|
apache-2.0
|
prestodb/presto-admin,prestodb/presto-admin
|
---
+++
@@ -15,4 +15,4 @@
# This must be the last line in the file and the format must be maintained
# even when the version is changed
-__version__ = '2.3'
+__version__ = '2.4-SNAPSHOT'
|
8a254f3b80016bf9d2a048191e947cb66993cc7a
|
bin/reactobus.py
|
bin/reactobus.py
|
#!/usr/bin/python3
import argparse
def main():
# Parse the command line
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--conf", default="/etc/reactobus.yaml",
help="ReactOBus configuration")
loggrp = parser.add_argument_group('Logging')
loggrp.add_argument("-l", "--level", default="INFO", type=str,
choices=["DEBUG", "ERROR", "INFO", "WARN"],
help="Log level (DEBUG, ERROR, INFO, WARN), default to INFO")
loggrp.add_argument("--log-file", default="-", type=str,
help="Log file, use '-' for stdout")
options = parser.parse_args()
if __name__ == '__main__':
main()
|
#!/usr/bin/python3
import argparse
import logging
import sys
FORMAT = "%(asctime)-15s %(levelname)s %(message)s"
LOG = logging.getLogger("ReactOBus")
def configure_logger(log_file, level):
if level == "ERROR":
LOG.setLevel(logging.ERROR)
elif level == "WARN":
LOG.setLevel(logging.WARN)
elif level == "INFO":
LOG.setLevel(logging.INFO)
else:
LOG.setLevel(logging.DEBUG)
if log_file == "-":
handler = logging.StreamHandler(sys.stdout)
else:
handler = logging.FileHandler(log_file, "a")
handler.setFormatter(logging.Formatter(FORMAT))
LOG.addHandler(handler)
def main():
# Parse the command line
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--conf", default="/etc/reactobus.yaml",
help="ReactOBus configuration")
loggrp = parser.add_argument_group('Logging')
loggrp.add_argument("-l", "--level", default="INFO", type=str,
choices=["DEBUG", "ERROR", "INFO", "WARN"],
help="Log level (DEBUG, ERROR, INFO, WARN), default to INFO")
loggrp.add_argument("--log-file", default="-", type=str,
help="Log file, use '-' for stdout")
options = parser.parse_args()
# Configure the logger
configure_logger(options.log_file, options.level)
LOG.error("une erreur")
LOG.warning("un warning")
LOG.info("une info")
LOG.debug("une ligne de debug")
if __name__ == '__main__':
main()
|
Add logging and argument parsing
|
Add logging and argument parsing
|
Python
|
agpl-3.0
|
ivoire/ReactOBus,ivoire/ReactOBus
|
---
+++
@@ -1,6 +1,29 @@
#!/usr/bin/python3
import argparse
+import logging
+import sys
+
+FORMAT = "%(asctime)-15s %(levelname)s %(message)s"
+LOG = logging.getLogger("ReactOBus")
+
+
+def configure_logger(log_file, level):
+ if level == "ERROR":
+ LOG.setLevel(logging.ERROR)
+ elif level == "WARN":
+ LOG.setLevel(logging.WARN)
+ elif level == "INFO":
+ LOG.setLevel(logging.INFO)
+ else:
+ LOG.setLevel(logging.DEBUG)
+
+ if log_file == "-":
+ handler = logging.StreamHandler(sys.stdout)
+ else:
+ handler = logging.FileHandler(log_file, "a")
+ handler.setFormatter(logging.Formatter(FORMAT))
+ LOG.addHandler(handler)
def main():
@@ -17,5 +40,14 @@
options = parser.parse_args()
+ # Configure the logger
+ configure_logger(options.log_file, options.level)
+
+ LOG.error("une erreur")
+ LOG.warning("un warning")
+ LOG.info("une info")
+ LOG.debug("une ligne de debug")
+
+
if __name__ == '__main__':
main()
|
2f084990d919855a4b1e4bb909c607ef91810fba
|
knights/dj.py
|
knights/dj.py
|
from collections import defaultdict
from django.template import TemplateDoesNotExist, TemplateSyntaxError # NOQA
from django.template.backends.base import BaseEngine
from django.template.backends.utils import csrf_input_lazy, csrf_token_lazy
from . import compiler
from . import loader
class KnightsTemplater(BaseEngine):
def __init__(self, params):
params = params.copy()
options = params.pop('OPTIONS').copy()
super(KnightsTemplater, self).__init__(params)
for path in params.get('DIRS', []):
loader.add_path(path)
def from_string(self, template_code):
tmpl = compiler.kompile(template_code)
return Template(tmpl)
def get_template(self, template_name):
tmpl = loader.load_template(template_name)
if tmpl is None:
raise TemplateDoesNotExist(template_name)
return Template(tmpl)
class Template(object):
def __init__(self, template):
self.template = template
def render(self, context=None, request=None):
if context is None:
context = {}
if request is not None:
context['request'] = request
context['csrf_input'] = csrf_input_lazy(request)
context['csrf_token'] = csrf_token_lazy(request)
ctx = defaultdict(str)
ctx.update(context)
return self.template(ctx)
|
from collections import defaultdict
from django.template import TemplateDoesNotExist, TemplateSyntaxError # NOQA
from django.template.backends.base import BaseEngine
from django.template.backends.utils import csrf_input_lazy, csrf_token_lazy
from . import compiler
from . import loader
class KnightsTemplater(BaseEngine):
def __init__(self, params):
params = params.copy()
options = params.pop('OPTIONS').copy()
super(KnightsTemplater, self).__init__(params)
def from_string(self, template_code):
tmpl = compiler.kompile(template_code)
return Template(tmpl)
def get_template(self, template_name):
try:
tmpl = loader.load_template(template_name, self.template_dirs)
except Exception as e:
raise TemplateSyntaxError(e).with_traceback(e.__traceback__)
if tmpl is None:
raise TemplateDoesNotExist(template_name)
return Template(tmpl)
class Template(object):
def __init__(self, template):
self.template = template
def render(self, context=None, request=None):
if context is None:
context = {}
if request is not None:
context['user'] = request.user
context['request'] = request
context['csrf_input'] = csrf_input_lazy(request)
context['csrf_token'] = csrf_token_lazy(request)
ctx = defaultdict(str)
ctx.update(context)
return self.template(ctx)
|
Use built in template dirs list Add user to context
|
Use built in template dirs list
Add user to context
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
---
+++
@@ -16,15 +16,15 @@
super(KnightsTemplater, self).__init__(params)
- for path in params.get('DIRS', []):
- loader.add_path(path)
-
def from_string(self, template_code):
tmpl = compiler.kompile(template_code)
return Template(tmpl)
def get_template(self, template_name):
- tmpl = loader.load_template(template_name)
+ try:
+ tmpl = loader.load_template(template_name, self.template_dirs)
+ except Exception as e:
+ raise TemplateSyntaxError(e).with_traceback(e.__traceback__)
if tmpl is None:
raise TemplateDoesNotExist(template_name)
return Template(tmpl)
@@ -39,6 +39,7 @@
if context is None:
context = {}
if request is not None:
+ context['user'] = request.user
context['request'] = request
context['csrf_input'] = csrf_input_lazy(request)
context['csrf_token'] = csrf_token_lazy(request)
|
720537726b3f1eb88e67ec7454ddddbee1f123fa
|
benchmarks/variables.py
|
benchmarks/variables.py
|
# Copyright 2022 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import dimod
class TimeIteration:
num_variables = 1000
variables = dict(string=dimod.variables.Variables(map(str, range(num_variables))),
index=dimod.variables.Variables(range(num_variables)),
integer=dimod.variables.Variables(range(num_variables, 0, -1))
)
params = variables.keys()
param_names = ['labels']
def time_iteration(self, key):
for v in self.variables[key]:
pass
|
# Copyright 2022 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dimod.variables import Variables
class TimeConstuction:
num_variables = 1000
iterables = dict(range=range(num_variables),
strings=list(map(str, range(num_variables))),
integers=list(range(1000)),
empty=[],
none=None,
)
params = iterables.keys()
param_names = ['iterable']
def time_construction(self, key):
Variables(self.iterables[key])
class TimeIteration:
num_variables = 1000
variables = dict(string=Variables(map(str, range(num_variables))),
index=Variables(range(num_variables)),
integer=Variables(range(num_variables, 0, -1))
)
params = variables.keys()
param_names = ['labels']
def time_iteration(self, key):
for v in self.variables[key]:
pass
|
Add benchmarks for Variables construction
|
Add benchmarks for Variables construction
|
Python
|
apache-2.0
|
dwavesystems/dimod,dwavesystems/dimod
|
---
+++
@@ -12,15 +12,32 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import dimod
+from dimod.variables import Variables
+
+
+class TimeConstuction:
+ num_variables = 1000
+
+ iterables = dict(range=range(num_variables),
+ strings=list(map(str, range(num_variables))),
+ integers=list(range(1000)),
+ empty=[],
+ none=None,
+ )
+
+ params = iterables.keys()
+ param_names = ['iterable']
+
+ def time_construction(self, key):
+ Variables(self.iterables[key])
class TimeIteration:
num_variables = 1000
- variables = dict(string=dimod.variables.Variables(map(str, range(num_variables))),
- index=dimod.variables.Variables(range(num_variables)),
- integer=dimod.variables.Variables(range(num_variables, 0, -1))
+ variables = dict(string=Variables(map(str, range(num_variables))),
+ index=Variables(range(num_variables)),
+ integer=Variables(range(num_variables, 0, -1))
)
params = variables.keys()
|
e6dc05681fdf20b4dd2683fdc52991645cfbaf59
|
shuup/admin/modules/attributes/views/list.py
|
shuup/admin/modules/attributes/views/list.py
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from shuup.admin.utils.picotable import ChoicesFilter, Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import Attribute, AttributeType, AttributeVisibility
class AttributeListView(PicotableListView):
model = Attribute
columns = [
Column("identifier", _("Identifier"), filter_config=TextFilter(
filter_field="identifier",
placeholder=_("Filter by identifier...")
)),
Column("name", _("Name"), sort_field="translations__name", display="name", filter_config=TextFilter(
filter_field="translations__name",
placeholder=_("Filter by name...")
)),
Column("type", _("Type"), filter_config=ChoicesFilter(AttributeType.choices)),
Column("visibility_mode", _("Visibility Mode"), filter_config=ChoicesFilter(AttributeVisibility.choices)),
Column("searchable", _("Searchable")),
Column("n_product_types", _("Used in # Product Types")),
]
def get_queryset(self):
return Attribute.objects.all().annotate(n_product_types=Count("product_types"))
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from shuup.admin.utils.picotable import ChoicesFilter, Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import Attribute, AttributeType, AttributeVisibility
class AttributeListView(PicotableListView):
model = Attribute
default_columns = [
Column("identifier", _("Identifier"), filter_config=TextFilter(
filter_field="identifier",
placeholder=_("Filter by identifier...")
)),
Column("name", _("Name"), sort_field="translations__name", display="name", filter_config=TextFilter(
filter_field="translations__name",
placeholder=_("Filter by name...")
)),
Column("type", _("Type"), filter_config=ChoicesFilter(AttributeType.choices)),
Column("visibility_mode", _("Visibility Mode"), filter_config=ChoicesFilter(AttributeVisibility.choices)),
Column("searchable", _("Searchable")),
Column("n_product_types", _("Used in # Product Types")),
]
def get_queryset(self):
return Attribute.objects.all().annotate(n_product_types=Count("product_types"))
|
Modify attributes for dynamic columns
|
Modify attributes for dynamic columns
Refs SH-64
|
Python
|
agpl-3.0
|
suutari/shoop,shoopio/shoop,shawnadelic/shuup,shawnadelic/shuup,shoopio/shoop,suutari-ai/shoop,suutari-ai/shoop,suutari-ai/shoop,suutari/shoop,shawnadelic/shuup,shoopio/shoop,suutari/shoop
|
---
+++
@@ -17,7 +17,7 @@
class AttributeListView(PicotableListView):
model = Attribute
- columns = [
+ default_columns = [
Column("identifier", _("Identifier"), filter_config=TextFilter(
filter_field="identifier",
placeholder=_("Filter by identifier...")
|
9de844864b3e6c732241a68d1871f701232d2733
|
celery_janitor/utils.py
|
celery_janitor/utils.py
|
import importlib
import urlparse
from celery_janitor import conf
from celery_janitor.exceptions import BackendNotSupportedException
BACKEND_MAPPING = {
'sqs': 'celery_janitor.backends.sqs.SQSBackend'
}
def import_class(path):
path_bits = path.split('.')
class_name = path_bits.pop()
module_path = '.'.join(path_bits)
module_itself = importlib.import_module(module_path)
if not hasattr(module_itself, class_name):
raise ImportError("Module '%s' has no '%s' class." % (module_path, class_name))
return getattr(module_itself, class_name)
class Config(object):
def __init__(self):
self.broker = urlparse.urlparse(conf.BROKER_URL)
def get_backend_class(self):
try:
return BACKEND_MAPPING[self.broker.scheme]
except KeyError:
raise BackendNotSupportedException(
"{} not supported".format(self.broker.scheme))
def get_credentials(self):
if self.broker.scheme == 'sqs':
access_id, access_secret = self.broker.netloc.split(':')
access_secret = access_secret[:-1]
return (access_id, access_secret)
def get_backend():
config = Config()
backend_class = config.get_backend()
backend = import_class(backend_class)
return backend(*config.get_credentials())
|
import importlib
try:
from urlparse import urlparse
except ImportError: # Python 3.x
from urllib.parse import urlparse
from celery_janitor import conf
from celery_janitor.exceptions import BackendNotSupportedException
BACKEND_MAPPING = {
'sqs': 'celery_janitor.backends.sqs.SQSBackend'
}
def import_class(path):
path_bits = path.split('.')
class_name = path_bits.pop()
module_path = '.'.join(path_bits)
module_itself = importlib.import_module(module_path)
if not hasattr(module_itself, class_name):
raise ImportError("Module '%s' has no '%s' class." % (module_path, class_name))
return getattr(module_itself, class_name)
class Config(object):
def __init__(self):
self.broker = urlparse(conf.BROKER_URL)
def get_backend_class(self):
try:
return BACKEND_MAPPING[self.broker.scheme]
except KeyError:
raise BackendNotSupportedException(
"{} not supported".format(self.broker.scheme))
def get_credentials(self):
if self.broker.scheme == 'sqs':
access_id, access_secret = self.broker.netloc.split(':')
access_secret = access_secret[:-1]
return (access_id, access_secret)
def get_backend():
config = Config()
backend_class = config.get_backend()
backend = import_class(backend_class)
return backend(*config.get_credentials())
|
Fix Python 3.4 import error
|
Fix Python 3.4 import error
|
Python
|
mit
|
comandrei/celery-janitor
|
---
+++
@@ -1,5 +1,8 @@
import importlib
-import urlparse
+try:
+ from urlparse import urlparse
+except ImportError: # Python 3.x
+ from urllib.parse import urlparse
from celery_janitor import conf
from celery_janitor.exceptions import BackendNotSupportedException
@@ -25,7 +28,7 @@
class Config(object):
def __init__(self):
- self.broker = urlparse.urlparse(conf.BROKER_URL)
+ self.broker = urlparse(conf.BROKER_URL)
def get_backend_class(self):
try:
|
8fc43046ebfaa41410c28ba6d3d27fffed25ee4e
|
var/spack/repos/builtin/packages/glm/package.py
|
var/spack/repos/builtin/packages/glm/package.py
|
from spack import *
class Glm(Package):
"""
OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on
the OpenGL Shading Language (GLSL) specification.
"""
homepage = "https://github.com/g-truc/glm"
url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"
version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('..', *std_cmake_args)
make()
make("install")
|
from spack import *
class Glm(Package):
"""
OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on
the OpenGL Shading Language (GLSL) specification.
"""
homepage = "https://github.com/g-truc/glm"
url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"
version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')
depends_on ("cmake")
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('..', *std_cmake_args)
make()
make("install")
|
Add missing dependency for glm
|
Add missing dependency for glm
|
Python
|
lgpl-2.1
|
TheTimmy/spack,EmreAtes/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack,tmerrick1/spack,skosukhin/spack,matthiasdiener/spack,lgarren/spack,skosukhin/spack,TheTimmy/spack,LLNL/spack,EmreAtes/spack,matthiasdiener/spack,mfherbst/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,matthiasdiener/spack,lgarren/spack,EmreAtes/spack,LLNL/spack,iulian787/spack,lgarren/spack,lgarren/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,EmreAtes/spack,mfherbst/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,skosukhin/spack,krafczyk/spack,mfherbst/spack,tmerrick1/spack,iulian787/spack,LLNL/spack,TheTimmy/spack,iulian787/spack,mfherbst/spack,lgarren/spack,krafczyk/spack,LLNL/spack,iulian787/spack
|
---
+++
@@ -11,6 +11,8 @@
url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"
version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')
+
+ depends_on ("cmake")
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
|
875f70c0c43b6fdc5825525e8ccfd137cecb2bfe
|
malcolm/modules/builtin/parts/helppart.py
|
malcolm/modules/builtin/parts/helppart.py
|
from annotypes import Anno
from malcolm.core import Part, PartRegistrar, StringMeta, Widget, APartName
from ..util import set_tags
with Anno("The URL that gives some help documentation for this Block"):
AHelpUrl = str
# Pull re-used annotypes into our namespace in case we are subclassed
APartName = APartName
class HelpPart(Part):
"""Part representing a link to some help documentation for the GUI"""
def __init__(self, help_url, name="help"):
# type: (AHelpUrl, APartName) -> None
super(HelpPart, self).__init__(name)
meta = StringMeta("Help documentation for the Block")
set_tags(meta, widget=Widget.HELP)
self.attr = meta.create_attribute_model(help_url)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(self.name, self.attr)
|
from annotypes import Anno
from malcolm.core import Part, PartRegistrar, StringMeta, Widget, APartName
from ..util import set_tags
with Anno("The URL that gives some help documentation for this Block"):
AHelpUrl = str
with Anno("The description of what the help documentation is about"):
ADesc = str
# Pull re-used annotypes into our namespace in case we are subclassed
APartName = APartName
class HelpPart(Part):
"""Part representing a link to some help documentation for the GUI"""
def __init__(self,
help_url, # type: AHelpUrl
name="help", # type: APartName
description="Help documentation for the Block" # type: ADesc
):
# type: (...) -> None
super(HelpPart, self).__init__(name)
meta = StringMeta(description)
set_tags(meta, widget=Widget.HELP)
self.attr = meta.create_attribute_model(help_url)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(self.name, self.attr)
|
Allow description to be changed in HelpPart
|
Allow description to be changed in HelpPart
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
---
+++
@@ -6,6 +6,9 @@
with Anno("The URL that gives some help documentation for this Block"):
AHelpUrl = str
+with Anno("The description of what the help documentation is about"):
+ ADesc = str
+
# Pull re-used annotypes into our namespace in case we are subclassed
APartName = APartName
@@ -13,10 +16,14 @@
class HelpPart(Part):
"""Part representing a link to some help documentation for the GUI"""
- def __init__(self, help_url, name="help"):
- # type: (AHelpUrl, APartName) -> None
+ def __init__(self,
+ help_url, # type: AHelpUrl
+ name="help", # type: APartName
+ description="Help documentation for the Block" # type: ADesc
+ ):
+ # type: (...) -> None
super(HelpPart, self).__init__(name)
- meta = StringMeta("Help documentation for the Block")
+ meta = StringMeta(description)
set_tags(meta, widget=Widget.HELP)
self.attr = meta.create_attribute_model(help_url)
|
0468c944464d55ba7ce0a821e1085ae530d49cf6
|
corehq/apps/es/cases.py
|
corehq/apps/es/cases.py
|
from .es_query import HQESQuery
from . import filters
class CaseES(HQESQuery):
index = 'cases'
@property
def builtin_filters(self):
return [
opened_range,
closed_range,
is_closed,
case_type,
] + super(CaseES, self).builtin_filters
def opened_range(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('opened_on', gt, gte, lt, lte)
def closed_range(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('closed_on', gt, gte, lt, lte)
def is_closed(closed=True):
return filters.term('closed', closed)
def case_type(type_):
return filters.term('type.exact', type_)
|
from .es_query import HQESQuery
from . import filters
class CaseES(HQESQuery):
index = 'cases'
@property
def builtin_filters(self):
return [
opened_range,
closed_range,
is_closed,
case_type,
owner,
] + super(CaseES, self).builtin_filters
def opened_range(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('opened_on', gt, gte, lt, lte)
def closed_range(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('closed_on', gt, gte, lt, lte)
def is_closed(closed=True):
return filters.term('closed', closed)
def case_type(type_):
return filters.term('type.exact', type_)
def owner(owner_id):
return filters.term('owner_id', owner_id)
|
Add `owner` filter to CaseES
|
Add `owner` filter to CaseES
|
Python
|
bsd-3-clause
|
puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
---
+++
@@ -12,6 +12,7 @@
closed_range,
is_closed,
case_type,
+ owner,
] + super(CaseES, self).builtin_filters
@@ -29,3 +30,7 @@
def case_type(type_):
return filters.term('type.exact', type_)
+
+
+def owner(owner_id):
+ return filters.term('owner_id', owner_id)
|
ef11e9d0247fbd10e317d30ca8898f9a3c079e37
|
cyder/base/tests/__init__.py
|
cyder/base/tests/__init__.py
|
from exceptions import AssertionError
from django.core.exceptions import ValidationError
from django.test import TestCase
from django.test.client import Client
from cyder.core.ctnr.models import Ctnr
class CyTestMixin(object):
"""
Mixin for all tests.
"""
def _pre_setup(self):
super(TestCase, self)._pre_setup()
# Add ctnrs to session.
session = self.client.session
session['ctnr'] = Ctnr.objects.get(id=2)
session['ctnrs'] = list(Ctnr.objects.all())
session.save()
def assertObjectsConflict(self, obj_create_list):
pairs = [(x,y)
for x in obj_create_list
for y in obj_create_list
if x != y]
for first, second in pairs:
x = first()
try:
second()
except ValidationError:
pass
else:
raise AssertionError(
"'{}' and '{}' do not conflict".format(first.name,
second.name))
x.delete()
def assertObjectsDontConflict(self, obj_create_list):
pairs = [(x,y)
for x in obj_create_list
for y in obj_create_list
if x != y]
for first, second in pairs:
x = first()
y = second()
y.delete()
x.delete()
class TestCase(TestCase, CyTestMixin):
"""
Base class for all tests.
"""
client_class = Client
fixtures = ['core/users']
|
from exceptions import AssertionError
from django.core.exceptions import ValidationError
from django.test import TestCase
from django.test.client import Client
from cyder.core.ctnr.models import Ctnr
class CyTestMixin(object):
"""
Mixin for all tests.
"""
def _pre_setup(self):
super(TestCase, self)._pre_setup()
# Add ctnrs to session.
session = self.client.session
session['ctnr'] = Ctnr.objects.get(id=2)
session['ctnrs'] = list(Ctnr.objects.all())
session.save()
def assertObjectsConflict(self, obj_create_list):
pairs = [(a,b)
for a in obj_create_list
for b in obj_create_list
if a != b]
for first, second in pairs:
x = first()
try:
second()
except ValidationError:
pass
else:
raise AssertionError(
"'{}' and '{}' do not conflict".format(first.name,
second.name))
x.delete()
def assertObjectsDontConflict(self, obj_create_list):
pairs = [(a,b)
for a in obj_create_list
for b in obj_create_list
if a != b]
for first, second in pairs:
x = first()
y = second()
y.delete()
x.delete()
class TestCase(TestCase, CyTestMixin):
"""
Base class for all tests.
"""
client_class = Client
fixtures = ['core/users']
|
Change variable names to reduce confusion
|
Change variable names to reduce confusion
|
Python
|
bsd-3-clause
|
murrown/cyder,drkitty/cyder,akeym/cyder,drkitty/cyder,OSU-Net/cyder,OSU-Net/cyder,zeeman/cyder,akeym/cyder,zeeman/cyder,akeym/cyder,akeym/cyder,drkitty/cyder,OSU-Net/cyder,zeeman/cyder,drkitty/cyder,zeeman/cyder,OSU-Net/cyder,murrown/cyder,murrown/cyder,murrown/cyder
|
---
+++
@@ -21,10 +21,10 @@
session.save()
def assertObjectsConflict(self, obj_create_list):
- pairs = [(x,y)
- for x in obj_create_list
- for y in obj_create_list
- if x != y]
+ pairs = [(a,b)
+ for a in obj_create_list
+ for b in obj_create_list
+ if a != b]
for first, second in pairs:
x = first()
@@ -39,10 +39,10 @@
x.delete()
def assertObjectsDontConflict(self, obj_create_list):
- pairs = [(x,y)
- for x in obj_create_list
- for y in obj_create_list
- if x != y]
+ pairs = [(a,b)
+ for a in obj_create_list
+ for b in obj_create_list
+ if a != b]
for first, second in pairs:
x = first()
|
a42a7e237a72825080fa0afea263dbd5766417bb
|
conary/lib/digestlib.py
|
conary/lib/digestlib.py
|
#
# Copyright (c) 2004-2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
"Compatibility module for python 2.4 - 2.6"
try:
import hashlib
sha1 = hashlib.sha1
md5 = hashlib.md5
sha256 = hashlib.sha256
except ImportError:
import sha
import md5
from Crypto.Hash import SHA256
sha1 = sha.new
md5 = md5.new
sha256 = SHA256.new
|
#
# Copyright (c) 2004-2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
"Compatibility module for python 2.4 - 2.6"
try:
import hashlib
sha1 = hashlib.sha1
md5 = hashlib.md5
except ImportError:
import sha
import md5
sha1 = sha.new
md5 = md5.new
from Crypto.Hash import SHA256
sha256 = SHA256.new
|
Use sha256 algorithm from pycrypto.
|
Use sha256 algorithm from pycrypto.
|
Python
|
apache-2.0
|
fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary
|
---
+++
@@ -18,11 +18,10 @@
import hashlib
sha1 = hashlib.sha1
md5 = hashlib.md5
- sha256 = hashlib.sha256
except ImportError:
import sha
import md5
- from Crypto.Hash import SHA256
sha1 = sha.new
md5 = md5.new
- sha256 = SHA256.new
+from Crypto.Hash import SHA256
+sha256 = SHA256.new
|
9a527d999075a92de3db174e0696e961c05041c4
|
dead_mailer.py
|
dead_mailer.py
|
#!/usr/bin/env python3
import boto3
client = boto3.client('ses')
client.send_email(
Source='david@severski.net',
Message={
'Subject': {
'Data': 'Here!',
},
'Body': {
'Text': {
'Data': "I'm not dead yet!",
}
}
},
Destination={'ToAddresses': ['davidski@deadheaven.com']}
)
|
#!/usr/bin/env python
import boto3
from random import sample
def acknowledge_send():
print "Acknowledge"
def indicate_failure():
print "Failure"
def set_message_body(selection):
switcher = {
'0': "I'm here!",
'1': "Brrrraaaaains!",
'2': "Arrived!"
}
return switcher.get(str(selection), 'nothing')
if __name__ == "__main__":
client = boto3.client('ses')
# read input
selection = sample([0, 1, 2], 1)[0]
#selection = 1
# set message body
message_body = set_message_body(selection)
# send email
response = client.send_email(
Source='david@severski.net',
Message={
'Subject': {
'Data': 'Here!',
},
'Body': {
'Text': {
'Data': message_body,
}
}
},
Destination={'ToAddresses': ['davidski@deadheaven.com']}
)
# check response
if 'MessageId' in response:
print('Sent, with a message ID of {}'.format(response['MessageId']))
acknowledge_send()
else:
print('Could not find a valid response')
print response
indicate_failure()
|
Break down into core functions
|
Break down into core functions
|
Python
|
mit
|
davidski/imnotdeadyet,davidski/imnotdeadyet
|
---
+++
@@ -1,21 +1,57 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
import boto3
+from random import sample
-client = boto3.client('ses')
+def acknowledge_send():
+ print "Acknowledge"
-client.send_email(
- Source='david@severski.net',
- Message={
- 'Subject': {
- 'Data': 'Here!',
+
+def indicate_failure():
+ print "Failure"
+
+
+def set_message_body(selection):
+ switcher = {
+ '0': "I'm here!",
+ '1': "Brrrraaaaains!",
+ '2': "Arrived!"
+ }
+ return switcher.get(str(selection), 'nothing')
+
+
+if __name__ == "__main__":
+
+ client = boto3.client('ses')
+
+ # read input
+ selection = sample([0, 1, 2], 1)[0]
+ #selection = 1
+
+ # set message body
+ message_body = set_message_body(selection)
+
+ # send email
+ response = client.send_email(
+ Source='david@severski.net',
+ Message={
+ 'Subject': {
+ 'Data': 'Here!',
+ },
+ 'Body': {
+ 'Text': {
+ 'Data': message_body,
+ }
+ }
},
- 'Body': {
- 'Text': {
- 'Data': "I'm not dead yet!",
- }
- }
- },
- Destination={'ToAddresses': ['davidski@deadheaven.com']}
-)
+ Destination={'ToAddresses': ['davidski@deadheaven.com']}
+ )
+ # check response
+ if 'MessageId' in response:
+ print('Sent, with a message ID of {}'.format(response['MessageId']))
+ acknowledge_send()
+ else:
+ print('Could not find a valid response')
+ print response
+ indicate_failure()
|
7d7afb7d92797b48f215505579e0fb872deee0f3
|
rst2pdf/utils.py
|
rst2pdf/utils.py
|
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import shlex
from reportlab.platypus import Spacer
from flowables import *
def parseRaw(data):
"""Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
"""
elements = []
lines = data.splitlines()
for line in lines:
lexer = shlex.shlex(line)
lexer.whitespace += ','
tokens = list(lexer)
command = tokens[0]
if command == 'PageBreak':
if len(tokens) == 1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]), int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now:
# def depth(node):
# if node.parent == None:
# return 0
# else:
# return 1 + depth(node.parent)
|
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import shlex
from reportlab.platypus import Spacer
from flowables import *
from styles import adjustUnits
def parseRaw(data):
"""Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
"""
elements = []
lines = data.splitlines()
for line in lines:
lexer = shlex.shlex(line)
lexer.whitespace += ','
tokens = list(lexer)
command = tokens[0]
if command == 'PageBreak':
if len(tokens) == 1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(adjustUnits(tokens[1]),
adjustUnits(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now:
# def depth(node):
# if node.parent == None:
# return 0
# else:
# return 1 + depth(node.parent)
|
Add unit support for spacers
|
Add unit support for spacers
|
Python
|
mit
|
rafaelmartins/rst2pdf,rafaelmartins/rst2pdf
|
---
+++
@@ -9,7 +9,7 @@
from reportlab.platypus import Spacer
from flowables import *
-
+from styles import adjustUnits
def parseRaw(data):
"""Parse and process a simple DSL to handle creation of flowables.
@@ -34,7 +34,8 @@
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
- elements.append(Spacer(int(tokens[1]), int(tokens[2])))
+ elements.append(Spacer(adjustUnits(tokens[1]),
+ adjustUnits(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
|
ff4c708d66f2d176697f01227061a9791e7d2488
|
statscache/utils.py
|
statscache/utils.py
|
import pkg_resources
import logging
log = logging.getLogger("fedmsg")
def find_stats_consumer(hub):
for cons in hub.consumers:
if 'StatsConsumer' in str(type(cons)):
return cons
raise ValueError('StatsConsumer not found.')
class memoized(object):
def __init__(self, func):
self.func = func
self.results = {}
def __call__(self, *args, **kwargs):
key = hash(str(args)) + hash(str(kwargs))
if self.results.get(key) is None:
self.results[key] = self.func(*args, **kwargs)
return self.results[key]
@memoized
def load_plugins(frequency, config):
plugins = []
entry_points = pkg_resources.iter_entry_points('statscache.plugin')
for entry_point in entry_points:
try:
module = entry_point.load()
model = module.make_model(frequency)
plugin = module.Plugin(config, model)
plugins.append(plugin)
except Exception:
log.exception("Failed to load plugin %r" % entry_point)
return plugins
def get_model(idx, frequency, config):
plugins = load_plugins(frequency, config)
for plugin in reversed(plugins):
if plugin.idx == idx:
return plugin.model
raise KeyError("No such model for %r %r" % (idx, frequency))
|
import pkg_resources
import logging
log = logging.getLogger("fedmsg")
def find_stats_consumer(hub):
for cons in hub.consumers:
if 'StatsConsumer' in str(type(cons)):
return cons
raise ValueError('StatsConsumer not found.')
class memoized(object):
def __init__(self, func):
self.func = func
self.results = {}
def __call__(self, *args, **kwargs):
key = hash(str(args)) + hash(str(kwargs))
if self.results.get(key) is None:
self.results[key] = self.func(*args, **kwargs)
return self.results[key]
@memoized
def load_plugins(frequency, config):
plugins = []
entry_points = pkg_resources.iter_entry_points('statscache.plugin')
for entry_point in entry_points:
try:
module = entry_point.load()
module_frequencies = getattr(module, 'FREQUENCIES')
if module_frequencies is not None and \
frequency not in module_frequencies:
continue
model = module.make_model(frequency)
plugin = module.Plugin(config, model)
plugins.append(plugin)
except Exception:
log.exception("Failed to load plugin %r" % entry_point)
return plugins
def get_model(idx, frequency, config):
plugins = load_plugins(frequency, config)
for plugin in reversed(plugins):
if plugin.idx == idx:
return plugin.model
raise KeyError("No such model for %r %r" % (idx, frequency))
|
Allow plugins to specify permissible update frequencies.
|
Allow plugins to specify permissible update frequencies.
Fixed #2.
|
Python
|
lgpl-2.1
|
yazman/statscache,yazman/statscache,yazman/statscache
|
---
+++
@@ -31,6 +31,10 @@
for entry_point in entry_points:
try:
module = entry_point.load()
+ module_frequencies = getattr(module, 'FREQUENCIES')
+ if module_frequencies is not None and \
+ frequency not in module_frequencies:
+ continue
model = module.make_model(frequency)
plugin = module.Plugin(config, model)
plugins.append(plugin)
|
a36033badfa90fde764b136fa1e713dbb267a02b
|
depot/admin.py
|
depot/admin.py
|
from django.contrib import admin
from .models import Depot, Item
# make items modifiable by admin
admin.site.register(Item)
class DepotAdmin(admin.ModelAdmin):
list_display = ['name', 'active']
ordering = ['name']
actions = ["make_archived", "make_restored"]
def make_message(self, num_changed, change):
if num_changed == 1:
message = "1 depot was"
else:
message = "%s depots were" % num_changed
return "%s successfully marked as %s" % (message, change)
def make_archived(self, request, queryset):
depots_archived = queryset.update(active = False)
self.message_user(request, self.make_message(depots_archived, "archived"))
make_archived.short_description = "Archive selected depots"
def make_restored(self, request, queryset):
depots_restored = queryset.update(active = True)
self.message_user(request, self.make_message(depots_restored, "restored"))
make_restored.short_description = "Restore selected depots"
# make depots modifiable by admin
admin.site.register(Depot, DepotAdmin)
|
from django.contrib import admin
from .models import Depot, Item
# make items modifiable by admin
admin.site.register(Item)
class DepotAdmin(admin.ModelAdmin):
list_display = ['name', 'active']
ordering = ['name']
actions = ["make_archived", "make_restored"]
@staticmethod
def format_message(num_changed, change):
if num_changed == 1:
message = "1 depot was"
else:
message = "%s depots were" % num_changed
return "%s successfully marked as %s" % (message, change)
def make_archived(self, request, queryset):
depots_archived = queryset.update(active=False)
self.message_user(request, DepotAdmin.format_message(depots_archived, "archived"))
make_archived.short_description = "Archive selected depots"
def make_restored(self, request, queryset):
depots_restored = queryset.update(active=True)
self.message_user(request, DepotAdmin.format_message(depots_restored, "restored"))
make_restored.short_description = "Restore selected depots"
# make depots modifiable by admin
admin.site.register(Depot, DepotAdmin)
|
Fix pylint complaining about spaces and other stuff
|
Fix pylint complaining about spaces and other stuff
|
Python
|
agpl-3.0
|
verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool
|
---
+++
@@ -13,7 +13,8 @@
actions = ["make_archived", "make_restored"]
- def make_message(self, num_changed, change):
+ @staticmethod
+ def format_message(num_changed, change):
if num_changed == 1:
message = "1 depot was"
else:
@@ -21,13 +22,13 @@
return "%s successfully marked as %s" % (message, change)
def make_archived(self, request, queryset):
- depots_archived = queryset.update(active = False)
- self.message_user(request, self.make_message(depots_archived, "archived"))
+ depots_archived = queryset.update(active=False)
+ self.message_user(request, DepotAdmin.format_message(depots_archived, "archived"))
make_archived.short_description = "Archive selected depots"
def make_restored(self, request, queryset):
- depots_restored = queryset.update(active = True)
- self.message_user(request, self.make_message(depots_restored, "restored"))
+ depots_restored = queryset.update(active=True)
+ self.message_user(request, DepotAdmin.format_message(depots_restored, "restored"))
make_restored.short_description = "Restore selected depots"
# make depots modifiable by admin
|
5651445944bce163a2c3f746d6ac1acd9ae76032
|
numpy/array_api/tests/test_asarray.py
|
numpy/array_api/tests/test_asarray.py
|
import numpy as np
def test_fast_return():
""""""
a = np.array([1, 2, 3], dtype='i')
assert np.asarray(a) is a
assert np.asarray(a, dtype='i') is a
# This may produce a new view or a copy, but is never the same object.
assert np.asarray(a, dtype='l') is not a
unequal_type = np.dtype('i', metadata={'spam': True})
b = np.asarray(a, dtype=unequal_type)
assert b is not a
assert b.base is a
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
# A quirk of the metadata test is that equivalent metadata dicts are still
# separate objects and so don't evaluate as the same array type description.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
assert c.dtype is equivalent_requirement
|
import numpy as np
def test_fast_return():
""""""
a = np.array([1, 2, 3], dtype='i')
assert np.asarray(a) is a
assert np.asarray(a, dtype='i') is a
# This may produce a new view or a copy, but is never the same object.
assert np.asarray(a, dtype='l') is not a
unequal_type = np.dtype('i', metadata={'spam': True})
b = np.asarray(a, dtype=unequal_type)
assert b is not a
assert b.base is a
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
# The descriptors are equivalent, but we have created
# distinct dtype instances.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
assert c.dtype is equivalent_requirement
|
Update comment and obey formatting requirements.
|
Update comment and obey formatting requirements.
|
Python
|
bsd-3-clause
|
charris/numpy,mhvk/numpy,mattip/numpy,mattip/numpy,mattip/numpy,numpy/numpy,mhvk/numpy,endolith/numpy,charris/numpy,numpy/numpy,endolith/numpy,charris/numpy,numpy/numpy,endolith/numpy,endolith/numpy,charris/numpy,mattip/numpy,numpy/numpy,mhvk/numpy,mhvk/numpy,mhvk/numpy
|
---
+++
@@ -16,8 +16,8 @@
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
- # A quirk of the metadata test is that equivalent metadata dicts are still
- # separate objects and so don't evaluate as the same array type description.
+ # The descriptors are equivalent, but we have created
+ # distinct dtype instances.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
|
7fed0208770413399fde5e76ad2046b6bc440b16
|
src/nodemgr/common/windows_process_manager.py
|
src/nodemgr/common/windows_process_manager.py
|
#
# Copyright (c) 2018 Juniper Networks, Inc. All rights reserved.
#
import time
from windows_process_mem_cpu import WindowsProcessMemCpuUsageData
class WindowsProcessInfoManager(object):
def get_mem_cpu_usage_data(self, pid, last_cpu, last_time):
return WindowsProcessMemCpuUsageData(pid, last_cpu, last_time)
def get_all_processes(self):
return []
def runforever(self):
while True:
time.sleep(5)
|
#
# Copyright (c) 2018 Juniper Networks, Inc. All rights reserved.
#
import psutil
import time
from windows_process_mem_cpu import WindowsProcessMemCpuUsageData
def _service_status_to_state(status):
if status == 'running':
return 'PROCESS_STATE_RUNNING'
else:
return 'PROCESS_STATE_STOPPED'
def _get_service_by_name(name):
service = None
try:
service = psutil.win_service_get(name)
except:
pass
return service
def _get_process_by_pid(pid):
process = None
try:
process = psutil.Process(pid)
except:
pass
return process
class WindowsProcessInfoManager(object):
def get_mem_cpu_usage_data(self, pid, last_cpu, last_time):
return WindowsProcessMemCpuUsageData(pid, last_cpu, last_time)
def get_all_processes(self):
agent_service = _get_service_by_name('ContrailAgent')
if agent_service != None:
info = {}
info['name'] = 'contrail-vrouter-agent'
info['group'] = info['name']
info['statename'] = _service_status_to_state(agent_service.status())
if info['statename'] == 'PROCESS_STATE_RUNNING':
info['pid'] = agent_service.pid()
agent_process = _get_process_by_pid(info['pid'])
if agent_process != None:
info['start'] = str(int(agent_process.create_time() * 1000000))
return [info]
else:
return []
def runforever(self):
while True:
time.sleep(5)
|
Implement checking if agent is up on Windows
|
Implement checking if agent is up on Windows
Very simple implementation using psutil
Change-Id: I2b7c65d6d677f0f57e79277ac2298f0b73729b94
Partial-Bug: #1783539
|
Python
|
apache-2.0
|
eonpatapon/contrail-controller,rombie/contrail-controller,eonpatapon/contrail-controller,eonpatapon/contrail-controller,rombie/contrail-controller,eonpatapon/contrail-controller,rombie/contrail-controller,eonpatapon/contrail-controller,eonpatapon/contrail-controller,eonpatapon/contrail-controller,rombie/contrail-controller,rombie/contrail-controller,rombie/contrail-controller,rombie/contrail-controller
|
---
+++
@@ -2,16 +2,53 @@
# Copyright (c) 2018 Juniper Networks, Inc. All rights reserved.
#
+import psutil
import time
from windows_process_mem_cpu import WindowsProcessMemCpuUsageData
+
+
+def _service_status_to_state(status):
+ if status == 'running':
+ return 'PROCESS_STATE_RUNNING'
+ else:
+ return 'PROCESS_STATE_STOPPED'
+
+def _get_service_by_name(name):
+ service = None
+ try:
+ service = psutil.win_service_get(name)
+ except:
+ pass
+ return service
+
+def _get_process_by_pid(pid):
+ process = None
+ try:
+ process = psutil.Process(pid)
+ except:
+ pass
+ return process
class WindowsProcessInfoManager(object):
def get_mem_cpu_usage_data(self, pid, last_cpu, last_time):
return WindowsProcessMemCpuUsageData(pid, last_cpu, last_time)
def get_all_processes(self):
- return []
+ agent_service = _get_service_by_name('ContrailAgent')
+ if agent_service != None:
+ info = {}
+ info['name'] = 'contrail-vrouter-agent'
+ info['group'] = info['name']
+ info['statename'] = _service_status_to_state(agent_service.status())
+ if info['statename'] == 'PROCESS_STATE_RUNNING':
+ info['pid'] = agent_service.pid()
+ agent_process = _get_process_by_pid(info['pid'])
+ if agent_process != None:
+ info['start'] = str(int(agent_process.create_time() * 1000000))
+ return [info]
+ else:
+ return []
def runforever(self):
while True:
|
47a7770bd3c5552d61f69b7df62bf4c36de56dc8
|
wysteria/__init__.py
|
wysteria/__init__.py
|
from wysteria.client import Client, TlsConfig
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
__all__ = [
"Client",
"TlsConfig",
"errors",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
]
|
"""The wysteria module provides a python interface for talking to a wysteria asset management
server.
Files:
------
- client.py
high level class that wraps a middleware connection & adds some helpful functions.
- constants.py
various constants used
- errors.py
contains various exceptions that can be raised
- search.py
simple class for building wysteria search params
- utils.py
simple utility functions for reading config files and other misc stuff
Modules
-------
- domain
python wrappers around various wysteria native objects
- middleware
python implementations of the communication protocol for talking to the server
Exported
--------
Client
Wysteria client wrapper class
TlsConfig
Simplified TLS config object that can be used to secure the middleware connection
errors
Error module that contains various exceptions that can be raised by the client
default_client
Sugar function to build & configure a client. Searches for a wysteria client config & falls
back on using some default hardcoded settings if all else fails.
from_config
Construct & configure a client from a given config file.
"""
from wysteria.client import Client
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
from wysteria.constants import FACET_LINK_TYPE
from wysteria.constants import VALUE_LINK_TYPE_VERSION
from wysteria.constants import VALUE_LINK_TYPE_ITEM
from wysteria.utils import default_client
from wysteria.utils import from_config
__all__ = [
"Client",
"errors",
"default_client",
"from_config",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
"FACET_LINK_TYPE",
"VALUE_LINK_TYPE_VERSION",
"VALUE_LINK_TYPE_ITEM",
]
|
Add module level imports and doc strings
|
Add module level imports and doc strings
|
Python
|
bsd-3-clause
|
voidshard/pywysteria,voidshard/pywysteria
|
---
+++
@@ -1,15 +1,72 @@
-from wysteria.client import Client, TlsConfig
+"""The wysteria module provides a python interface for talking to a wysteria asset management
+server.
+
+
+Files:
+------
+
+- client.py
+ high level class that wraps a middleware connection & adds some helpful functions.
+- constants.py
+ various constants used
+- errors.py
+ contains various exceptions that can be raised
+- search.py
+ simple class for building wysteria search params
+- utils.py
+ simple utility functions for reading config files and other misc stuff
+
+
+Modules
+-------
+
+ - domain
+ python wrappers around various wysteria native objects
+ - middleware
+ python implementations of the communication protocol for talking to the server
+
+
+Exported
+--------
+
+ Client
+ Wysteria client wrapper class
+
+ TlsConfig
+ Simplified TLS config object that can be used to secure the middleware connection
+
+ errors
+ Error module that contains various exceptions that can be raised by the client
+
+ default_client
+ Sugar function to build & configure a client. Searches for a wysteria client config & falls
+ back on using some default hardcoded settings if all else fails.
+
+ from_config
+ Construct & configure a client from a given config file.
+
+"""
+from wysteria.client import Client
from wysteria import errors
from wysteria.constants import FACET_COLLECTION
from wysteria.constants import FACET_ITEM_TYPE
from wysteria.constants import FACET_ITEM_VARIANT
+from wysteria.constants import FACET_LINK_TYPE
+from wysteria.constants import VALUE_LINK_TYPE_VERSION
+from wysteria.constants import VALUE_LINK_TYPE_ITEM
+from wysteria.utils import default_client
+from wysteria.utils import from_config
__all__ = [
"Client",
- "TlsConfig",
"errors",
+ "default_client",
+ "from_config",
"FACET_COLLECTION",
"FACET_ITEM_TYPE",
"FACET_ITEM_VARIANT",
+ "FACET_LINK_TYPE",
+ "VALUE_LINK_TYPE_VERSION",
+ "VALUE_LINK_TYPE_ITEM",
]
|
990ae22e95705bf4131c6a8326408a8fb2648433
|
zerodb/crypto/ecc.py
|
zerodb/crypto/ecc.py
|
import six
import hashlib
import ecdsa # We can use pyelliptic (uses OpenSSL) but this is more cross-patform
# We use curve standard for Bitcoin by default
CURVE = ecdsa.SECP256k1
class SigningKey(ecdsa.SigningKey, object):
def get_pubkey(self):
return b'\x04' + self.get_verifying_key().to_string()
def sign(self, msg):
return super(SigningKey, self).sign(
msg,
sigencode=ecdsa.util.sigencode_der,
hashfunc=hashlib.sha256)
class VerifyingKey(ecdsa.VerifyingKey, object):
def verify(self, signature, data):
return super(VerifyingKey, self).verify(
signature, data,
hashfunc=hashlib.sha256,
sigdecode=ecdsa.util.sigdecode_der)
def private(seed, salt, kdf=None, curve=CURVE):
assert callable(kdf)
if six.PY3 and isinstance(seed, six.string_types):
seed = seed.encode()
if isinstance(salt, (list, tuple)):
salt = "|".join(salt)
if six.PY3:
salt = salt.encode()
return SigningKey.from_string(kdf(seed, salt), curve=curve)
def public(pub, curve=CURVE):
assert pub[0] == b'\x04'[0]
return VerifyingKey.from_string(pub[1:], curve=curve)
|
import six
import hashlib
import ecdsa # We can use pyelliptic (uses OpenSSL) but this is more cross-platform
# We use curve standard for Bitcoin by default
CURVE = ecdsa.SECP256k1
class SigningKey(ecdsa.SigningKey, object):
def get_pubkey(self):
return b'\x04' + self.get_verifying_key().to_string()
def sign(self, msg):
return super(SigningKey, self).sign(
msg,
sigencode=ecdsa.util.sigencode_der,
hashfunc=hashlib.sha256)
class VerifyingKey(ecdsa.VerifyingKey, object):
def verify(self, signature, data):
return super(VerifyingKey, self).verify(
signature, data,
hashfunc=hashlib.sha256,
sigdecode=ecdsa.util.sigdecode_der)
def private(seed, salt, kdf=None, curve=CURVE):
assert callable(kdf)
if six.PY3 and isinstance(seed, six.string_types):
seed = seed.encode()
if isinstance(salt, (list, tuple)):
salt = "|".join(salt)
if six.PY3:
salt = salt.encode()
return SigningKey.from_string(kdf(seed, salt), curve=curve)
def public(pub, curve=CURVE):
assert pub[0] == b'\x04'[0]
return VerifyingKey.from_string(pub[1:], curve=curve)
|
Fix a typo: patform -> platform
|
Fix a typo: patform -> platform
|
Python
|
agpl-3.0
|
zerodb/zerodb,zerodb/zerodb,zero-db/zerodb,zero-db/zerodb
|
---
+++
@@ -1,6 +1,6 @@
import six
import hashlib
-import ecdsa # We can use pyelliptic (uses OpenSSL) but this is more cross-patform
+import ecdsa # We can use pyelliptic (uses OpenSSL) but this is more cross-platform
# We use curve standard for Bitcoin by default
CURVE = ecdsa.SECP256k1
|
869bafa9aadf45c2beb3e6f4e3d3751d2d6baf8f
|
subversion/bindings/swig/python/tests/core.py
|
subversion/bindings/swig/python/tests/core.py
|
import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
|
import unittest, os
import svn.core
class SubversionCoreTestCase(unittest.TestCase):
"""Test cases for the basic SWIG Subversion core"""
def test_SubversionException(self):
self.assertEqual(svn.core.SubversionException().args, ())
self.assertEqual(svn.core.SubversionException('error message').args,
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
self.assertEqual(svn.core.SubversionException('error message', 1).apr_err,
1)
self.assertEqual(svn.core.SubversionException('error message', 1).message,
'error message')
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
self.assertEqual(1, svn.core.svn_mime_type_is_binary("image/png"))
def test_mime_type_validate(self):
self.assertRaises(svn.core.SubversionException,
svn.core.svn_mime_type_validate, "this\nis\ninvalid\n")
svn.core.svn_mime_type_validate("unknown/but-valid; charset=utf8")
def suite():
return unittest.makeSuite(SubversionCoreTestCase, 'test')
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
|
Add a regression test for the bug fixed in r28485.
|
Add a regression test for the bug fixed in r28485.
* subversion/bindings/swig/python/tests/core.py
(SubversionCoreTestCase.test_SubversionException): Test explicit
exception fields.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@868579 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion
|
---
+++
@@ -11,6 +11,10 @@
('error message',))
self.assertEqual(svn.core.SubversionException('error message', 1).args,
('error message', 1))
+ self.assertEqual(svn.core.SubversionException('error message', 1).apr_err,
+ 1)
+ self.assertEqual(svn.core.SubversionException('error message', 1).message,
+ 'error message')
def test_mime_type_is_binary(self):
self.assertEqual(0, svn.core.svn_mime_type_is_binary("text/plain"))
|
b1b02a65cded26e7b0a6ddf207def5522297f7a7
|
__openerp__.py
|
__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
##############################################################################
{
'name': u"Asset Streamline",
'version': u"0.1",
'author': u"XCG Consulting",
'category': u"Custom Module",
'description': u"""Includes several integrity fixes and optimizations over
the standard module.
""",
'website': u"",
'depends': [
'base',
'account_streamline',
'account_asset',
'oemetasl',
],
'data': [
'data/asset_sequence.xml',
'security/ir.model.access.csv',
'wizard/account_asset_close_view.xml',
'wizard/account_asset_suspend_view.xml',
'wizard/account_asset_change_values_view.xml',
'wizard/account_asset_depreciation_wizard.xml',
'wizard/account_asset_change_duration_view.xml',
'views/account_asset_view.xml',
],
'demo': [
'demo/account_asset_demo.xml'
],
'css': [
'static/src/css/account_asset_streamline.css'
],
'test': [],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
##############################################################################
{
'name': u"Asset Streamline",
'version': u"1.0",
'author': u"XCG Consulting",
'category': u"Custom Module",
'description': u"""Includes several integrity fixes and optimizations over
the standard module.
""",
'website': u"",
'depends': [
'base',
'account_streamline',
'analytic_structure'
'account_asset',
'oemetasl',
],
'data': [
'data/asset_sequence.xml',
'security/ir.model.access.csv',
'wizard/account_asset_close_view.xml',
'wizard/account_asset_suspend_view.xml',
'wizard/account_asset_change_values_view.xml',
'wizard/account_asset_depreciation_wizard.xml',
'wizard/account_asset_change_duration_view.xml',
'views/account_asset_view.xml',
],
'demo': [
'demo/account_asset_demo.xml'
],
'css': [
'static/src/css/account_asset_streamline.css'
],
'test': [],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Add dependency for 'analytic_structure' and change version to 1.0
|
Add dependency for 'analytic_structure' and change version to 1.0
|
Python
|
agpl-3.0
|
xcgd/account_asset_streamline
|
---
+++
@@ -4,7 +4,7 @@
##############################################################################
{
'name': u"Asset Streamline",
- 'version': u"0.1",
+ 'version': u"1.0",
'author': u"XCG Consulting",
'category': u"Custom Module",
'description': u"""Includes several integrity fixes and optimizations over
@@ -14,6 +14,7 @@
'depends': [
'base',
'account_streamline',
+ 'analytic_structure'
'account_asset',
'oemetasl',
],
|
0c60434dc573b5770b8061751771c773032a4f76
|
salt/output/__init__.py
|
salt/output/__init__.py
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
get_printout(out, opts)(data)
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
|
'''
Used to manage the outputter system. This package is the modular system used
for managing outputters.
'''
# Import salt utils
import salt.loader
STATIC = (
'yaml_out',
'txt_out',
'raw_out',
'json_out',
)
def display_output(data, out, opts=None):
'''
Print the passed data using the desired output
'''
get_printout(out, opts)(data)
def get_printout(out, opts=None, **kwargs):
'''
Return a printer function
'''
for outputter in STATIC:
if outputter in opts:
if opts[outputter]:
out = outputter
if out.endswith('_out'):
out = out[:-4]
if opts is None:
opts = {}
opts.update(kwargs)
if not 'color' in opts:
opts['color'] = not bool(opts.get('no_color', False))
outputters = salt.loader.outputters(opts)
if not out in outputters:
return outputters['pprint']
return outputters[out]
|
Handle output passthrough from the cli
|
Handle output passthrough from the cli
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -6,6 +6,13 @@
# Import salt utils
import salt.loader
+
+STATIC = (
+ 'yaml_out',
+ 'txt_out',
+ 'raw_out',
+ 'json_out',
+ )
def display_output(data, out, opts=None):
'''
@@ -18,6 +25,10 @@
'''
Return a printer function
'''
+ for outputter in STATIC:
+ if outputter in opts:
+ if opts[outputter]:
+ out = outputter
if out.endswith('_out'):
out = out[:-4]
if opts is None:
|
9c058304c9ad1ad8c9220bc9f098a9dcf80700b9
|
valohai_yaml/objs/pipelines/execution_node.py
|
valohai_yaml/objs/pipelines/execution_node.py
|
from .node import Node
class ExecutionNode(Node):
type = 'execution'
def __init__(self, name, step, override=None):
if override is None:
override = {}
self.name = name
self.step = step
self.override = override
|
from .node import Node
class ExecutionNode(Node):
type = 'execution'
def __init__(self, name, step, override=None):
if override is None:
override = {}
self.name = name
self.step = step
self.override = override
def lint(self, lint_result, context):
super().lint(lint_result, context)
config = context['config']
pipeline = context['pipeline']
if self.step not in config.steps:
lint_result.add_error('Pipeline {pipeline} node {node} step {step} does not exist'.format(
pipeline=pipeline.name,
node=self.name,
step=self.step,
))
|
Add linting for pipeline step existence
|
Add linting for pipeline step existence
|
Python
|
mit
|
valohai/valohai-yaml
|
---
+++
@@ -10,3 +10,14 @@
self.name = name
self.step = step
self.override = override
+
+ def lint(self, lint_result, context):
+ super().lint(lint_result, context)
+ config = context['config']
+ pipeline = context['pipeline']
+ if self.step not in config.steps:
+ lint_result.add_error('Pipeline {pipeline} node {node} step {step} does not exist'.format(
+ pipeline=pipeline.name,
+ node=self.name,
+ step=self.step,
+ ))
|
4db714570a9ce58a08c72aa1477e9e7a48ed650c
|
tests/util_tests.py
|
tests/util_tests.py
|
# -*- coding: utf-8 -*-
from chai import Chai
from arrow import util
class UtilTests(Chai):
def test_is_timestamp(self):
timestamp_float = 1563047716.958061
timestamp_int = int(timestamp_float)
self.assertTrue(util.is_timestamp(timestamp_int))
self.assertTrue(util.is_timestamp(timestamp_float))
self.assertFalse(util.is_timestamp(str(timestamp_int)))
self.assertFalse(util.is_timestamp(str(timestamp_float)))
self.assertFalse(util.is_timestamp(True))
self.assertFalse(util.is_timestamp(False))
full_datetime = "2019-06-23T13:12:42"
self.assertFalse(util.is_timestamp(full_datetime))
overflow_timestamp_float = 99999999999999999999999999.99999999999999999999999999
with self.assertRaises((OverflowError, ValueError)):
util.is_timestamp(overflow_timestamp_float)
overflow_timestamp_int = int(overflow_timestamp_float)
with self.assertRaises((OverflowError, ValueError)):
util.is_timestamp(overflow_timestamp_int)
|
# -*- coding: utf-8 -*-
import time
from chai import Chai
from arrow import util
class UtilTests(Chai):
def test_is_timestamp(self):
timestamp_float = time.time()
timestamp_int = int(timestamp_float)
self.assertTrue(util.is_timestamp(timestamp_int))
self.assertTrue(util.is_timestamp(timestamp_float))
self.assertFalse(util.is_timestamp(str(timestamp_int)))
self.assertFalse(util.is_timestamp(str(timestamp_float)))
self.assertFalse(util.is_timestamp(True))
self.assertFalse(util.is_timestamp(False))
full_datetime = "2019-06-23T13:12:42"
self.assertFalse(util.is_timestamp(full_datetime))
overflow_timestamp_float = 99999999999999999999999999.99999999999999999999999999
with self.assertRaises((OverflowError, ValueError)):
util.is_timestamp(overflow_timestamp_float)
overflow_timestamp_int = int(overflow_timestamp_float)
with self.assertRaises((OverflowError, ValueError)):
util.is_timestamp(overflow_timestamp_int)
|
Replace hard coded timestamp with time.time()
|
Replace hard coded timestamp with time.time()
|
Python
|
apache-2.0
|
crsmithdev/arrow
|
---
+++
@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
+import time
+
from chai import Chai
from arrow import util
@@ -6,7 +8,7 @@
class UtilTests(Chai):
def test_is_timestamp(self):
- timestamp_float = 1563047716.958061
+ timestamp_float = time.time()
timestamp_int = int(timestamp_float)
self.assertTrue(util.is_timestamp(timestamp_int))
|
7a172a7fe98223fd20a4bb5d497aa17653b8a13b
|
dev_tools/coverage_runner.py
|
dev_tools/coverage_runner.py
|
"""Run tests under coverage's measurement system (Used in CI)
"""
import os
import sys
from os.path import join, realpath
# Third Party modules
import nose
import coverage
cov = coverage.coverage(branch=True)
cov.start()
result = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
cov.stop()
cov.save()
if result == 0:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
sys.exit(result)
|
"""Run tests under coverage's measurement system (Used in CI)
"""
import os
import sys
from os.path import join, realpath
# Third Party modules
import nose
import coverage
cov = coverage.coverage(branch=True)
cov.start()
success = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
cov.stop()
cov.save()
if success:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
sys.exit(0 if success else 1)
|
Correct the usage of nose.run.
|
[TRAVIS] Correct the usage of nose.run.
nose.run returns whether the test run was successful or not.
|
Python
|
bsd-3-clause
|
pradyunsg/Py2C,pradyunsg/Py2C
|
---
+++
@@ -12,15 +12,14 @@
cov = coverage.coverage(branch=True)
cov.start()
-result = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
+success = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
cov.stop()
cov.save()
-
-if result == 0:
+if success:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
-sys.exit(result)
+sys.exit(0 if success else 1)
|
bd0800d46126d963f1ae107924a632752bc94173
|
indra/sources/bel/__init__.py
|
indra/sources/bel/__init__.py
|
from .api import process_ndex_neighborhood
from .api import process_belrdf
from .api import process_belscript
from .api import process_pybel_graph
from .api import process_json_file
from .api import process_pybel_neighborhood
|
from .api import process_ndex_neighborhood, process_belrdf, \
process_belscript, process_pybel_graph, process_json_file, \
process_pybel_neighborhood, process_cbn_jgif_file
|
Add all endpoints to BEL API
|
Add all endpoints to BEL API
|
Python
|
bsd-2-clause
|
johnbachman/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra
|
---
+++
@@ -1,6 +1,3 @@
-from .api import process_ndex_neighborhood
-from .api import process_belrdf
-from .api import process_belscript
-from .api import process_pybel_graph
-from .api import process_json_file
-from .api import process_pybel_neighborhood
+from .api import process_ndex_neighborhood, process_belrdf, \
+ process_belscript, process_pybel_graph, process_json_file, \
+ process_pybel_neighborhood, process_cbn_jgif_file
|
bcbe4f9d91ef386b5a09d99e9c0c22b4dfcdc09b
|
dmf_device_ui/__init__.py
|
dmf_device_ui/__init__.py
|
# -*- coding: utf-8 -*-
import gtk
import uuid
def gtk_wait(wait_duration_s):
gtk.main_iteration_do()
def generate_plugin_name(prefix='plugin-'):
'''
Generate unique plugin name.
'''
return prefix + str(uuid.uuid4()).split('-')[0]
|
# -*- coding: utf-8 -*-
from pygtkhelpers.utils import refresh_gui
import uuid
def gtk_wait(wait_duration_s):
refresh_gui()
def generate_plugin_name(prefix='plugin-'):
'''
Generate unique plugin name.
'''
return prefix + str(uuid.uuid4()).split('-')[0]
|
Use pygtkhelpers refresh_gui in gtk_wait
|
Use pygtkhelpers refresh_gui in gtk_wait
|
Python
|
lgpl-2.1
|
wheeler-microfluidics/dmf-device-ui
|
---
+++
@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
-import gtk
+from pygtkhelpers.utils import refresh_gui
import uuid
def gtk_wait(wait_duration_s):
- gtk.main_iteration_do()
+ refresh_gui()
def generate_plugin_name(prefix='plugin-'):
|
30be8d71fee8f7429d6b4d48a8168133062e3315
|
text_test/regex_utils_test.py
|
text_test/regex_utils_test.py
|
# coding=utf-8
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
pass
def test_parse_line(self):
pass
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
# coding=utf-8
import unittest
from text import regex_utils
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
self.assertTrue(regex_utils.check_line('.*(\d+.\d+.\d+.\d+)', 'MyIP is 192.168.199.4'))
self.assertTrue(regex_utils.check_line('Test (Data|Case) For (py-text|py-task)', 'Test Data For py-text'))
self.assertFalse(regex_utils.check_line('.*(\d+.\d+.\d+.{100,255})', 'MyIP is 192.168.199.4'))
self.assertFalse(regex_utils.check_line(None, 'Test Word'))
self.assertFalse(regex_utils.check_line('.*', None))
def test_parse_line(self):
result = regex_utils.parse_line('name=(\S+), type=(\S+), ip=(\S+)', 'name=ASA5505, type=Firewall, ip=192.168.199.4')
self.assertEqual(len(result), 3)
self.assertEqual(result[0], 'ASA5505')
self.assertEqual(result[1], 'Firewall')
self.assertEqual(result[2], '192.168.199.4')
result = regex_utils.parse_line('Test Data', None)
self.assertEqual(result, None)
result = regex_utils.parse_line(None, 'Test Data')
self.assertEqual(result, 'Test Data')
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Update regex_utils unit test case
|
Update regex_utils unit test case
|
Python
|
apache-2.0
|
PinaeOS/py-text,interhui/py-text
|
---
+++
@@ -6,10 +6,25 @@
class RegexUtilsTest(unittest.TestCase):
def test_check_line(self):
- pass
+ self.assertTrue(regex_utils.check_line('.*(\d+.\d+.\d+.\d+)', 'MyIP is 192.168.199.4'))
+ self.assertTrue(regex_utils.check_line('Test (Data|Case) For (py-text|py-task)', 'Test Data For py-text'))
+ self.assertFalse(regex_utils.check_line('.*(\d+.\d+.\d+.{100,255})', 'MyIP is 192.168.199.4'))
+ self.assertFalse(regex_utils.check_line(None, 'Test Word'))
+ self.assertFalse(regex_utils.check_line('.*', None))
def test_parse_line(self):
- pass
+ result = regex_utils.parse_line('name=(\S+), type=(\S+), ip=(\S+)', 'name=ASA5505, type=Firewall, ip=192.168.199.4')
+ self.assertEqual(len(result), 3)
+ self.assertEqual(result[0], 'ASA5505')
+ self.assertEqual(result[1], 'Firewall')
+ self.assertEqual(result[2], '192.168.199.4')
+
+ result = regex_utils.parse_line('Test Data', None)
+ self.assertEqual(result, None)
+
+ result = regex_utils.parse_line(None, 'Test Data')
+ self.assertEqual(result, 'Test Data')
+
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
|
3b73440a59b22bcbbaa16a2f8c2ff49b1f985b7f
|
examples/example3_components.py
|
examples/example3_components.py
|
import luigi
import sciluigi as sl
import time
class T1(sl.Task):
# Parameter
text = luigi.Parameter()
# I/O
def out_data1(self):
return sl.TargetInfo(self, self.text + '.txt') # TODO: Of course make the target spec into an object with "get target" method!
# Implementation
def run(self):
with self.out_data1().target.open('w') as outfile:
outfile.write(self.text)
# ========================================================================
class Merge(sl.Task):
# I/O
in_data1 = None
in_data2 = None
def out_merged(self):
return sl.TargetInfo(self, self.in_data1().path + '.merged.txt')
# Implementation
def run(self):
with self.in_data1().target.open() as in1, self.in_data2().target.open() as in2, self.out_merged().target.open('w') as outfile:
for row in in1:
outfile.write(row+'\n')
for row in in2:
outfile.write(row+'\n')
|
import luigi
import sciluigi as sl
import time
class T1(sl.Task):
# Parameter
text = luigi.Parameter()
# I/O
def out_data1(self):
return sl.TargetInfo(self, self.text + '.txt') # TODO: Of course make the target spec into an object with "get target" method!
# Implementation
def run(self):
with self.out_data1().open('w') as outfile:
outfile.write(self.text)
# ========================================================================
class Merge(sl.Task):
# I/O
in_data1 = None
in_data2 = None
def out_merged(self):
return sl.TargetInfo(self, self.in_data1().path + '.merged.txt')
# Implementation
def run(self):
with self.in_data1().open() as in1, self.in_data2().open() as in2, self.out_merged().open('w') as outfile:
for row in in1:
outfile.write(row+'\n')
for row in in2:
outfile.write(row+'\n')
|
Use new open() function on TargetInfo, in example3
|
Use new open() function on TargetInfo, in example3
|
Python
|
mit
|
samuell/sciluigi,pharmbio/sciluigi,pharmbio/sciluigi
|
---
+++
@@ -16,7 +16,7 @@
# Implementation
def run(self):
- with self.out_data1().target.open('w') as outfile:
+ with self.out_data1().open('w') as outfile:
outfile.write(self.text)
# ========================================================================
@@ -34,7 +34,8 @@
# Implementation
def run(self):
- with self.in_data1().target.open() as in1, self.in_data2().target.open() as in2, self.out_merged().target.open('w') as outfile:
+ with self.in_data1().open() as in1, self.in_data2().open() as in2, self.out_merged().open('w') as outfile:
+
for row in in1:
outfile.write(row+'\n')
for row in in2:
|
47088dd1ed69207e6e74af98c1f6a4124493ed0c
|
forum/forms.py
|
forum/forms.py
|
from django.forms import ModelForm,Textarea,TextInput
from .models import Post
class PostForm(ModelForm):
class Meta:
model = Post
fields = ('subject','body')
widgets = {
'subject': TextInput(attrs={'autofocus':'autofocus'}),
'body': Textarea(
attrs={
'data-provide':'markdown',
'data-hidden-buttons':'cmdHeading',
}),
}
|
from django.forms import ModelForm,Textarea,TextInput
from .models import Post
class PostForm(ModelForm):
class Meta:
model = Post
fields = ('subject','body')
widgets = {
'subject': TextInput(attrs={'autofocus':'autofocus'}),
'body': Textarea(
attrs={
'data-provide':'markdown',
'data-hidden-buttons':'cmdHeading',
'data-iconlibrary':'octicons',
}),
}
|
Use Octicons in Markdown editor
|
Use Octicons in Markdown editor
|
Python
|
mit
|
Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters
|
---
+++
@@ -14,6 +14,7 @@
attrs={
'data-provide':'markdown',
'data-hidden-buttons':'cmdHeading',
+ 'data-iconlibrary':'octicons',
}),
}
|
37a8277bc53e5fe03c94d1bdaacb4087585fd36e
|
application.py
|
application.py
|
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
|
Make it easier to run
|
Make it easier to run
|
Python
|
mpl-2.0
|
radioprotector/radremedy,AllieDeford/radremedy,AllieDeford/radremedy,radioprotector/radremedy,radremedy/radremedy,radremedy/radremedy,radioprotector/radremedy,radioprotector/radremedy,radremedy/radremedy,radremedy/radremedy,AllieDeford/radremedy
|
---
+++
@@ -1,4 +1,4 @@
-
+#!/usr/bin/env python
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
|
aa242ab8451887fe8a4ddfa223d0e11c8c3a472f
|
lilkv/columnfamily.py
|
lilkv/columnfamily.py
|
# -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
pass
def insert(self, Column):
pass
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def delete(self, key):
# NOTE: Really an insert with a tombstone
insert(key, tombstone=True)
pass
def __repr__(self):
return '<%r>' % self.name
|
# -*- coding: utf-8 -*-
"""
lilkv.columnfamily
This module implements the client-facing aspect of the `lilkv` app. All
requests are handled through this interface.
"""
class ColumnFamily(object):
"""Column Family objects store information about all rows.
daily_purchases_cf = ColumnFamily("daily_purchases")
"""
def __init__(self, name, data_dir='data'):
self.name = name
self.ROWS = set()
def insert(self, column):
return self._insert(column)
def delete(self, column):
column.tombstone = True
return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
def _insert(self, column):
try:
self.ROWS.add(column)
return True
except:
return False
def __repr__(self):
return '<%r>' % self.name
|
Define inserts and deletes on CFs.
|
Define inserts and deletes on CFs.
|
Python
|
mit
|
pgorla/lil-kv
|
---
+++
@@ -16,19 +16,25 @@
def __init__(self, name, data_dir='data'):
self.name = name
- pass
+ self.ROWS = set()
- def insert(self, Column):
- pass
+ def insert(self, column):
+ return self._insert(column)
+
+ def delete(self, column):
+ column.tombstone = True
+ return self._insert(column)
def get(self, key):
# NOTE: Check for tombstones / TTL here
pass
- def delete(self, key):
- # NOTE: Really an insert with a tombstone
- insert(key, tombstone=True)
- pass
+ def _insert(self, column):
+ try:
+ self.ROWS.add(column)
+ return True
+ except:
+ return False
def __repr__(self):
return '<%r>' % self.name
|
b52523b78b7ebc5358cb3dc9aa257cc5b3fbbb72
|
blog/models.py
|
blog/models.py
|
from django.db import models
from django.utils import timezone
class Post(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=100)
tags = models.CharField(max_length=200)
pub_date = models.DateTimeField(blank=True, null=True)
text = models.TextField()
def publish(self):
self.pub_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
from django.db import models
from django.utils import timezone
class Post(models.Model):
title = models.CharField(max_length=200)
pub_date = models.DateTimeField(blank=True, null=True)
text = models.TextField()
def __str__(self):
return self.title
|
Remove tags and fixed .gitignore
|
Remove tags and fixed .gitignore
|
Python
|
mit
|
DLance96/django-blog,DLance96/django-blog,DLance96/django-blog
|
---
+++
@@ -3,14 +3,8 @@
class Post(models.Model):
title = models.CharField(max_length=200)
- author = models.CharField(max_length=100)
- tags = models.CharField(max_length=200)
pub_date = models.DateTimeField(blank=True, null=True)
text = models.TextField()
- def publish(self):
- self.pub_date = timezone.now()
- self.save()
-
def __str__(self):
return self.title
|
1bd74c601a7e198461095b44a268eb4ee50c913d
|
wheelcms_project/settings/base/settings_logging.py
|
wheelcms_project/settings/base/settings_logging.py
|
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
}
}
|
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
}
}
|
Fix django 1.5 warning - provide debug filter
|
Fix django 1.5 warning - provide debug filter
|
Python
|
bsd-2-clause
|
wheelcms/wheelcms_project
|
---
+++
@@ -6,9 +6,15 @@
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
+ 'filters': {
+ 'require_debug_false': {
+ '()': 'django.utils.log.RequireDebugFalse'
+ }
+ },
'handlers': {
'mail_admins': {
'level': 'ERROR',
+ 'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
|
12c57e52d3f107ce9723f33e7f35ef752bb8f3bc
|
axelrod/tests/unit/test_deterministic_cache.py
|
axelrod/tests/unit/test_deterministic_cache.py
|
import unittest


class TestDeterministicCache(unittest.TestCase):
    """Placeholder suite for DeterministicCache; all bodies still TODO."""

    def test_init(self):
        # TODO: cover construction.
        pass

    def test_setitem(self):
        # TODO: cover item assignment.
        pass

    def test_save(self):
        # TODO: cover save behaviour.
        pass

    def test_load(self):
        # TODO: cover load behaviour.
        pass
|
import unittest

from axelrod import DeterministicCache, TitForTat, Defector


class TestDeterministicCache(unittest.TestCase):
    """Basic unit tests for DeterministicCache: init, item access, key checks."""

    @classmethod
    def setUpClass(cls):
        # Shared fixture: a (player class, player class) key mapped to a
        # list of per-turn action pairs.
        cls.test_key1 = (TitForTat, Defector)
        cls.test_value1 = [('C', 'D'), ('D', 'D'), ('D', 'D')]

    def test_basic_init(self):
        # A fresh cache is mutable and has no turn count yet.
        cache = DeterministicCache()
        self.assertTrue(cache.mutable)
        self.assertEqual(cache.turns, None)

    def test_init_from_file(self):
        # TODO: cover construction from a file.
        pass

    def test_setitem(self):
        # Round-trip: a stored value must be readable under the same key.
        cache = DeterministicCache()
        cache[self.test_key1] = self.test_value1
        self.assertEqual(cache[self.test_key1], self.test_value1)

    def test_set_immutable_cache(self):
        # Writing to a cache flagged immutable must raise.
        cache = DeterministicCache()
        cache.mutable = False
        with self.assertRaises(ValueError):
            cache[self.test_key1] = self.test_value1

    def test_is_valid_key(self):
        cache = DeterministicCache()
        self.assertTrue(cache._is_valid_key(self.test_key1))
        # Should return false if key is not a tuple
        self.assertFalse(cache._is_valid_key('test'))
        # Should return false if tuple is not a pair
        self.assertFalse(cache._is_valid_key(('test', 'test', 'test')))
        # Should return false if contents of tuple are not axelrod Players
        self.assertFalse(cache._is_valid_key(('test', 'test')))

    def test_is_valid_value(self):
        # TODO: cover value validation.
        pass

    def test_save(self):
        # TODO: cover save behaviour.
        pass

    def test_load(self):
        # TODO: cover load behaviour.
        pass
|
Add content for basic tests
|
Add content for basic tests
|
Python
|
mit
|
ranjinidas/Axelrod,marcharper/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod
|
---
+++
@@ -1,12 +1,44 @@
import unittest
+from axelrod import DeterministicCache, TitForTat, Defector
class TestDeterministicCache(unittest.TestCase):
- def test_init(self):
+ @classmethod
+ def setUpClass(cls):
+ cls.test_key1 = (TitForTat, Defector)
+ cls.test_value1 = [('C', 'D'), ('D', 'D'), ('D', 'D')]
+
+ def test_basic_init(self):
+ cache = DeterministicCache()
+ self.assertTrue(cache.mutable)
+ self.assertEqual(cache.turns, None)
+
+ def test_init_from_file(self):
pass
def test_setitem(self):
+ cache = DeterministicCache()
+ cache[self.test_key1] = self.test_value1
+ self.assertEqual(cache[self.test_key1], self.test_value1)
+
+ def test_set_immutable_cache(self):
+ cache = DeterministicCache()
+ cache.mutable = False
+ with self.assertRaises(ValueError):
+ cache[self.test_key1] = self.test_value1
+
+ def test_is_valid_key(self):
+ cache = DeterministicCache()
+ self.assertTrue(cache._is_valid_key(self.test_key1))
+ # Should return false if key is not a tuple
+ self.assertFalse(cache._is_valid_key('test'))
+ # Should return false if tuple is not a pair
+ self.assertFalse(cache._is_valid_key(('test', 'test', 'test')))
+ # Should return false if contents of tuple are not axelrod Players
+ self.assertFalse(cache._is_valid_key(('test', 'test')))
+
+ def test_is_valid_value(self):
pass
def test_save(self):
|
44a4df24e65420a37638b895ddc59147bae2502e
|
clock.py
|
clock.py
|
from apscheduler.schedulers.blocking import BlockingScheduler

sched = BlockingScheduler()


# Cron schedule: minute 0 of every third hour (00:00, 03:00, 06:00, ...).
@sched.scheduled_job('cron', hour="*/3", minute=0)
def updater():
    """
    Run our update command every three hours.
    """
    # Set env
    import os
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
    # Boot Django
    import django
    django.setup()
    # Run the command
    from django.core.management import call_command
    call_command("updatedownloadswebsite", noinput=True, verbosity=3)

# Blocks forever, firing the job on schedule.
sched.start()
|
from apscheduler.schedulers.blocking import BlockingScheduler

sched = BlockingScheduler()


# Cron schedule: once a day at 10:00.
@sched.scheduled_job('cron', hour="10", minute=0)
def updater():
    """
    Run our update command once a day at 10:00.
    """
    # Set env
    import os
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
    # Boot Django
    import django
    django.setup()
    # Run the command
    from django.core.management import call_command
    call_command("updatedownloadswebsite", noinput=True, verbosity=3)

# Blocks forever, firing the job on schedule.
sched.start()
|
Reduce how often this runs
|
Reduce how often this runs
|
Python
|
mit
|
california-civic-data-coalition/django-calaccess-downloads-website,california-civic-data-coalition/django-calaccess-downloads-website,california-civic-data-coalition/django-calaccess-downloads-website
|
---
+++
@@ -3,7 +3,7 @@
sched = BlockingScheduler()
-@sched.scheduled_job('cron', hour="*/3", minute=0)
+@sched.scheduled_job('cron', hour="10", minute=0)
def updater():
"""
Run our update command every three hours.
|
63fe76240a819a0211aab566c1cd36b31c49c5d9
|
freepacktbook/pushover.py
|
freepacktbook/pushover.py
|
import json
import requests
class PushoverNotification(object):
    """Sends a Pushover notification announcing the daily free Packt ebook."""

    def __init__(self, pushover_user, pushover_token):
        self.pushover_api = 'https://api.pushover.net/1/messages.json'
        self.pushover_user = pushover_user
        self.pushover_token = pushover_token

    def get_image_content(self, image_url):
        # Fetch the cover image and return its raw bytes.
        return requests.get(image_url, stream=True).content

    def notify(self, data):
        """POST a notification built from *data*.

        Silently no-ops when either credential is missing.  The cover image
        is attached when it can be downloaded; otherwise the message is sent
        without an attachment.
        """
        if not all([self.pushover_user, self.pushover_token]):
            return
        payload = {
            'user': self.pushover_user,
            'token': self.pushover_token,
            'title': data['title'],
            'url': data['book_url'],
            'url_title': data['title'],
            # Fix: the %-format arguments must be a parenthesized tuple;
            # the original bare "a, b" was a SyntaxError inside the dict.
            'message': 'Today\'s Free eBook\n%s\n%s' % (
                data['title'], data['description'])
        }
        try:
            # Fix: must be called via self; the bare name was undefined.
            image_content = self.get_image_content(data['image_url'].replace(' ', '%20'))
        except Exception:
            files = None
        else:
            files = {'attachment': ('cover.jpg', image_content)}
        # Fix: reuse the computed ``files`` so a failed download does not
        # reference an unset ``image_content``.
        requests.post(
            self.pushover_api,
            data=payload,
            files=files
        )
|
import json
import requests
class PushoverNotification(object):
    """Sends a Pushover notification announcing the daily free Packt ebook."""

    def __init__(self, pushover_user, pushover_token):
        self.pushover_api = 'https://api.pushover.net/1/messages.json'
        self.pushover_user = pushover_user
        self.pushover_token = pushover_token

    def get_image_content(self, image_url):
        # Fetch the cover image and return its raw bytes.
        return requests.get(image_url, stream=True).content

    def notify(self, data):
        """POST a notification built from *data*.

        Silently no-ops when either credential is missing.  The cover image
        is attached when it can be downloaded; otherwise the message is sent
        without an attachment.
        """
        if not all([self.pushover_user, self.pushover_token]):
            return
        payload = {
            'user': self.pushover_user,
            'token': self.pushover_token,
            'title': data['title'],
            'url': data['book_url'],
            'url_title': data['title'],
            'message': 'Today\'s Free eBook\n%s\n%s' % (
                data['title'], data['description'])
        }
        try:
            # Fix: must be called via self; the bare name get_image_content
            # is undefined at module level, so the original always raised
            # NameError here, was swallowed by the except, and the cover
            # attachment was never sent.
            image_content = self.get_image_content(data['image_url'].replace(' ', '%20'))
        except Exception:
            files = None
        else:
            files = {'attachment': ('cover.jpg', image_content)}
        requests.post(
            self.pushover_api,
            data=payload,
            files=files
        )
|
Fix syntax error and reuse variable
|
Fix syntax error and reuse variable
|
Python
|
mit
|
bogdal/freepacktbook
|
---
+++
@@ -21,7 +21,8 @@
'title': data['title'],
'url': data['book_url'],
'url_title': data['title'],
- 'message': 'Today\'s Free eBook\n%s\n%s' % data['title'], data['description']
+ 'message': 'Today\'s Free eBook\n%s\n%s' % (
+ data['title'], data['description'])
}
try:
@@ -34,7 +35,5 @@
requests.post(
self.pushover_api,
data=payload,
- files={
- 'attachment': ('cover.jpg', image_content)
- }
+ files=files
)
|
8404ced0a54df6ab4be3f6d10a4d1201d2105f09
|
fusesoc/build/__init__.py
|
fusesoc/build/__init__.py
|
from fusesoc.build.quartus import Quartus
from fusesoc.build.ise import Ise
def BackendFactory(system):
    """Return the build-backend instance matching *system*'s backend name.

    Raises a plain Exception when the name matches no known backend.
    """
    backend = system.backend_name
    if backend == 'quartus':
        return Quartus(system)
    if backend == 'ise':
        return Ise(system)
    raise Exception("Backend not found")
|
from fusesoc.build.quartus import Quartus
from fusesoc.build.ise import Ise
def BackendFactory(system):
    """Return the build-backend instance matching *system*'s backend name.

    Raises:
        RuntimeError: if ``system.backend_name`` names no known backend.
    """
    if system.backend_name == 'quartus':
        return Quartus(system)
    elif system.backend_name == 'ise':
        return Ise(system)
    else:
        # Fix: the original referenced the misspelled name ``systaem`` here,
        # so unknown backends raised NameError instead of this RuntimeError.
        raise RuntimeError('Backend "{}" not found'.format(system.backend_name))
|
Improve error handling for unknown backends
|
Improve error handling for unknown backends
|
Python
|
bsd-2-clause
|
olofk/fusesoc,lowRISC/fusesoc,olofk/fusesoc,lowRISC/fusesoc
|
---
+++
@@ -7,4 +7,4 @@
elif system.backend_name == 'ise':
return Ise(system)
else:
- raise Exception("Backend not found")
+ raise RuntimeError('Backend "{}" not found'.format(system.backend_name))
|
8f094e1c3d4a64942cadf5603ce5b23706381fac
|
nubes/cmd/__init__.py
|
nubes/cmd/__init__.py
|
import openstack
def main():
    """Entry point: print a placeholder greeting."""
    greeting = "Hello Clouds!"
    print(greeting)
|
import argparse
from nubes import dispatcher
def main():
    """CLI entry point: parse ``connector resource action`` plus credential
    flags from sys.argv, then dispatch the matching method on a Dispatcher.

    Returns whatever the dispatched method returns.
    """
    parser = argparse.ArgumentParser(description='Universal IaaS CLI')
    parser.add_argument('connector', help='IaaS Name')
    parser.add_argument('resource', help='Resource to perform action')
    parser.add_argument('action', help='Action to perform on resource')
    parser.add_argument('--auth-url')
    parser.add_argument('--username')
    parser.add_argument('--password')
    parser.add_argument('--project-name')
    args = parser.parse_args()
    dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
                                     args.username, args.password,
                                     args.project_name)
    resource = args.resource
    if args.action == 'list':
        # make plural
        # NOTE(review): naive pluralisation (append 's'); breaks for
        # irregular resource names -- confirm against Dispatcher's API.
        resource = args.resource + 's'
    # e.g. action='list', resource='servers' -> dispatch.list_servers()
    method_name = '_'.join([args.action, resource])
    return getattr(dispatch, method_name)()
|
Make crude CLI commands work
|
Make crude CLI commands work
This is mainly as an example to show what it can look like.
|
Python
|
apache-2.0
|
omninubes/nubes
|
---
+++
@@ -1,5 +1,25 @@
-import openstack
+import argparse
+
+from nubes import dispatcher
def main():
- print("Hello Clouds!")
+ parser = argparse.ArgumentParser(description='Universal IaaS CLI')
+ parser.add_argument('connector', help='IaaS Name')
+ parser.add_argument('resource', help='Resource to perform action')
+ parser.add_argument('action', help='Action to perform on resource')
+ parser.add_argument('--auth-url')
+ parser.add_argument('--username')
+ parser.add_argument('--password')
+ parser.add_argument('--project-name')
+ args = parser.parse_args()
+ dispatch = dispatcher.Dispatcher(args.connector, args.auth_url,
+ args.username, args.password,
+ args.project_name)
+ resource = args.resource
+ if args.action == 'list':
+ # make plural
+ resource = args.resource + 's'
+
+ method_name = '_'.join([args.action, resource])
+ return getattr(dispatch, method_name)()
|
15a9d8b9e361462532ed286abce4ee445b9ec74a
|
analytics/rejections.py
|
analytics/rejections.py
|
# -*- encoding: utf-8
"""
I get a bunch of requests that are uninteresting for some reason -- maybe
somebody trying to find a PHP admin page, or crawling for vulnerable WordPress
instances. Any such request can immediately be rejected as uninteresting
for my analytics.
"""
from urllib.parse import urlparse
# Request paths that only ever show up in vulnerability probes.
BAD_PATHS = [
    '/admin/',
    '/dbadmin/',
]


def should_be_rejected(log_line):
    """Return True when the request path is a known-uninteresting probe."""
    path = urlparse(log_line.url).path
    return path in BAD_PATHS
|
# -*- encoding: utf-8
"""
I get a bunch of requests that are uninteresting for some reason -- maybe
somebody trying to find a PHP admin page, or crawling for vulnerable WordPress
instances. Any such request can immediately be rejected as uninteresting
for my analytics.
"""
from urllib.parse import urlparse
# Request paths that only ever show up in vulnerability probes.
BAD_PATHS = [
    '/admin/',
    '/dbadmin/',
    '/myadmin/',
    '/mysqladmin/',
    '/mysql-admin/',
    '/mysqlmanager/',
    '/sqlmanager/',
    '/sqlweb/',
    '/webdb/',
    '/websql/',
]


def should_be_rejected(log_line):
    """Return True when the request path is a known-uninteresting probe."""
    path = urlparse(log_line.url).path
    return path in BAD_PATHS
|
Add more to the list of bad paths
|
Add more to the list of bad paths
|
Python
|
mit
|
alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net
|
---
+++
@@ -12,6 +12,14 @@
BAD_PATHS = [
'/admin/',
'/dbadmin/',
+ '/myadmin/',
+ '/mysqladmin/',
+ '/mysql-admin/',
+ '/mysqlmanager/',
+ '/sqlmanager/',
+ '/sqlweb/',
+ '/webdb/',
+ '/websql/',
]
|
9f6b664c4b0f45828ef8d8a77cdae30bba6ee3a8
|
buildPy2app.py
|
buildPy2app.py
|
"""
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
from glob import glob
import syncplay
APP = ['syncplayClient.py']

# Bundle static assets (icons, rich-text docs, lua scripts) under the
# app's "resources" directory.
DATA_FILES = [
    ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')),
]
OPTIONS = {
    'iconfile':'resources/icon.icns',
    # Pull in the PySide2 (Qt 5) bindings explicitly and exclude the
    # legacy PySide (Qt 4) modules so both generations are not bundled.
    'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'},
    'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'},
    'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib'],
    # macOS bundle metadata written into Info.plist.
    'plist': {
        'CFBundleName':'Syncplay',
        'CFBundleShortVersionString':syncplay.version,
        'CFBundleIdentifier':'pl.syncplay.Syncplay',
        'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved'
    }
}

setup(
    app=APP,
    name='Syncplay',
    data_files=DATA_FILES,
    options={'py2app': OPTIONS},
    setup_requires=['py2app'],
)
|
"""
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
from glob import glob
import syncplay
APP = ['syncplayClient.py']

# Bundle static assets (icons, rich-text docs, lua scripts) under the
# app's "resources" directory.
DATA_FILES = [
    ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')),
]
OPTIONS = {
    'iconfile':'resources/icon.icns',
    # Pull in the PySide2 (Qt 5) bindings explicitly and exclude the
    # legacy PySide (Qt 4) modules so both generations are not bundled.
    'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'},
    'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'},
    # libqmacstyle ships the native macOS widget style as a separate
    # plugin in newer Qt (5.11+) -- verify against the bundled Qt version.
    'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'],
    # macOS bundle metadata written into Info.plist.
    'plist': {
        'CFBundleName':'Syncplay',
        'CFBundleShortVersionString':syncplay.version,
        'CFBundleIdentifier':'pl.syncplay.Syncplay',
        'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved'
    }
}

setup(
    app=APP,
    name='Syncplay',
    data_files=DATA_FILES,
    options={'py2app': OPTIONS},
    setup_requires=['py2app'],
)
|
Update py2app script for Qt 5.11
|
Update py2app script for Qt 5.11
|
Python
|
apache-2.0
|
Syncplay/syncplay,NeverDecaf/syncplay,alby128/syncplay,Syncplay/syncplay,NeverDecaf/syncplay,alby128/syncplay
|
---
+++
@@ -17,7 +17,7 @@
'iconfile':'resources/icon.icns',
'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'},
'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'},
- 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib'],
+ 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'],
'plist': {
'CFBundleName':'Syncplay',
'CFBundleShortVersionString':syncplay.version,
|
8f0befc2bd6e42c544e30630a82fdcec291dfe1f
|
judge/telerik_academy_auth.py
|
judge/telerik_academy_auth.py
|
from django.contrib.auth.models import User
from dmoj import settings
import json
import requests
from judge.models import Profile, Language
class RemoteUserBackend (object):
    """Django auth backend that validates credentials against the Telerik
    Academy remote API and mirrors the remote user locally."""

    def get_login_url(self, api_key, username, password):
        # NOTE(review): credentials travel in the query string, so they can
        # end up in proxy/server logs -- consider moving them to the body.
        return 'https://telerikacademy.com/Api/Users/CheckUserLogin?apiKey=%s&usernameoremail=%s&password=%s' % (api_key, username, password)

    def authenticate(self, username=None, password=None):
        """Return a local User when the remote API accepts the credentials,
        else None so Django tries the next backend."""
        # Telerik Academy Authentication
        r = requests.post(self.get_login_url(settings.API_KEY, username, password))
        result = json.loads(r.content)

        if result['IsValid']:
            # Create the local shadow user on first login.
            try:
                user = User.objects.get(username=username)
            except User.DoesNotExist:
                user = User(username=username)
                user.save()

            # Ensure an attached judge profile with default language/timezone.
            profile, _ = Profile.objects.get_or_create(user=user, defaults={
                'language': Language.get_python2(),
                'timezone': 'Europe/Sofia',
            })
            profile.name = username
            profile.save()

            # Mirror the remote admin flag onto Django's staff/superuser bits.
            if result['IsAdmin']:
                user.is_staff = True
                user.is_superuser = True
            else:
                user.is_staff = False
                user.is_superuser = False
            user.save()
            return user
        return None

    def get_user(self, user_id):
        """Required backend hook: primary key -> User, or None."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
|
from django.contrib.auth.models import User
from dmoj import settings
import json
import requests
from judge.models import Profile, Language
class RemoteUserBackend (object):
    """Django auth backend that validates credentials against the Telerik
    Academy remote API and mirrors the remote user locally."""

    def get_login_url(self, api_key, username, password):
        # NOTE(review): credentials travel in the query string, so they can
        # end up in proxy/server logs -- consider moving them to the body.
        return 'https://telerikacademy.com/Api/Users/CheckUserLogin?apiKey=%s&usernameoremail=%s&password=%s' % (api_key, username, password)

    def authenticate(self, username=None, password=None):
        """Return a local User when the remote API accepts the credentials,
        else None so Django tries the next backend."""
        # Telerik Academy Authentication
        r = requests.post(self.get_login_url(settings.API_KEY, username, password))
        result = json.loads(r.content)

        if result['IsValid']:
            # Use the canonical username the API returns rather than the raw
            # login input, which may be an email (the endpoint accepts
            # "usernameoremail") or differ in case.
            real_username = result['UserName']

            # Create the local shadow user on first login.
            try:
                user = User.objects.get(username=real_username)
            except User.DoesNotExist:
                user = User(username=real_username)
                user.save()

            # Ensure an attached judge profile with default language/timezone.
            profile, _ = Profile.objects.get_or_create(user=user, defaults={
                'language': Language.get_python2(),
                'timezone': 'Europe/Sofia',
            })
            profile.name = real_username
            profile.save()

            # Mirror the remote admin flag onto Django's staff/superuser bits.
            if result['IsAdmin']:
                user.is_staff = True
                user.is_superuser = True
            else:
                user.is_staff = False
                user.is_superuser = False
            user.save()
            return user
        return None

    def get_user(self, user_id):
        """Required backend hook: primary key -> User, or None."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
|
Use username provided by telerik academy auth API
|
Use username provided by telerik academy auth API
|
Python
|
agpl-3.0
|
Minkov/site,Minkov/site,Minkov/site,Minkov/site
|
---
+++
@@ -19,10 +19,12 @@
result = json.loads(r.content)
if result['IsValid']:
+ real_username = result['UserName']
+
try:
- user = User.objects.get(username=username)
+ user = User.objects.get(username=real_username)
except User.DoesNotExist:
- user = User(username=username)
+ user = User(username=real_username)
user.save()
@@ -31,7 +33,7 @@
'timezone': 'Europe/Sofia',
})
- profile.name = username
+ profile.name = real_username
profile.save()
if result['IsAdmin']:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.