commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
67ea5109ddcdb19d77de882960d5eb791c1368ae | setup.py | setup.py | #!/usr/bin/python3
# SPDX-License-Identifier: LGPL-2.1+
from setuptools import setup, Command, find_packages
class BuildManpage(Command):
description = ('builds the manpage')
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
self.spawn(['pandoc', '-t', 'man', '-s', '-o', 'man/mkosi.1', 'mkosi.md'])
setup(
name="mkosi",
version="13",
description="Build Bespoke OS Images",
url="https://github.com/systemd/mkosi",
maintainer="mkosi contributors",
maintainer_email="systemd-devel@lists.freedesktop.org",
license="LGPLv2+",
python_requires=">=3.7",
packages = find_packages(".", exclude=["tests"]),
package_data = {"": ["*.sh", "*.hook", "*.conf"]},
include_package_data = True,
scripts = ["bin/mkosi"],
cmdclass = { "man": BuildManpage },
data_files = [('share/man/man1', ["man/mkosi.1"])],
)
| #!/usr/bin/python3
# SPDX-License-Identifier: LGPL-2.1+
from setuptools import setup, Command, find_packages
class BuildManpage(Command):
description = ('builds the manpage')
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
self.spawn(['pandoc', '-t', 'man', '-s', '-o', 'man/mkosi.1', 'mkosi.md'])
setup(
name="mkosi",
version="13",
description="Build Bespoke OS Images",
url="https://github.com/systemd/mkosi",
maintainer="mkosi contributors",
maintainer_email="systemd-devel@lists.freedesktop.org",
license="LGPLv2+",
python_requires=">=3.7",
packages = find_packages(".", exclude=["tests"]),
package_data = {"": ["*.sh", "*.hook", "*.conf", "*.install"]},
include_package_data = True,
scripts = ["bin/mkosi"],
cmdclass = { "man": BuildManpage },
data_files = [('share/man/man1', ["man/mkosi.1"])],
)
| Include any files ending in '.install' in package data | Include any files ending in '.install' in package data
This makes sure the new `dpkg-reconfigure-dracut.install` file under resources
gets included as package data.
| Python | lgpl-2.1 | systemd/mkosi,systemd/mkosi | ---
+++
@@ -26,7 +26,7 @@
license="LGPLv2+",
python_requires=">=3.7",
packages = find_packages(".", exclude=["tests"]),
- package_data = {"": ["*.sh", "*.hook", "*.conf"]},
+ package_data = {"": ["*.sh", "*.hook", "*.conf", "*.install"]},
include_package_data = True,
scripts = ["bin/mkosi"],
cmdclass = { "man": BuildManpage }, |
b825ee12fd6abc91b80b8a62886b9c53b82cdeeb | test/task_test.py | test/task_test.py | import doctest
import unittest
import luigi.task
class TaskTest(unittest.TestCase):
def test_tasks_doctest(self):
doctest.testmod(luigi.task)
| import doctest
import unittest
import luigi.task
import luigi
from datetime import datetime, timedelta
class DummyTask(luigi.Task):
param = luigi.Parameter()
bool_param = luigi.BooleanParameter()
int_param = luigi.IntParameter()
float_param = luigi.FloatParameter()
date_param = luigi.DateParameter()
datehour_param = luigi.DateHourParameter()
timedelta_param = luigi.TimeDeltaParameter()
list_param = luigi.Parameter(is_list=True)
class TaskTest(unittest.TestCase):
def test_tasks_doctest(self):
doctest.testmod(luigi.task)
def test_task_to_str_to_task(self):
params = dict(
param='test',
bool_param=True,
int_param=666,
float_param=123.456,
date_param=datetime(2014, 9, 13).date(),
datehour_param=datetime(2014, 9, 13, 9),
timedelta_param=timedelta(44), # doesn't support seconds
list_param=['in', 'flames'])
original = DummyTask(**params)
other = DummyTask.from_str_params(original.to_str_params(), {})
self.assertEqual(original, other)
if __name__ == '__main__':
unittest.main()
| Add test for task to str params conversion | Add test for task to str params conversion
| Python | apache-2.0 | soxofaan/luigi,adaitche/luigi,alkemics/luigi,dlstadther/luigi,fw1121/luigi,lungetech/luigi,linsomniac/luigi,moandcompany/luigi,mbruggmann/luigi,vine/luigi,aeron15/luigi,leafjungle/luigi,Houzz/luigi,springcoil/luigi,ViaSat/luigi,Tarrasch/luigi,oldpa/luigi,linsomniac/luigi,dstandish/luigi,kalaidin/luigi,jw0201/luigi,h3biomed/luigi,aeron15/luigi,DomainGroupOSS/luigi,huiyi1990/luigi,alkemics/luigi,dstandish/luigi,laserson/luigi,jw0201/luigi,soxofaan/luigi,graingert/luigi,wakamori/luigi,dylanjbarth/luigi,dlstadther/luigi,alkemics/luigi,kevhill/luigi,Yoone/luigi,SeedScientific/luigi,spotify/luigi,PeteW/luigi,humanlongevity/luigi,JackDanger/luigi,sahitya-pavurala/luigi,glenndmello/luigi,moritzschaefer/luigi,Magnetic/luigi,LamCiuLoeng/luigi,walkers-mv/luigi,moritzschaefer/luigi,dhruvg/luigi,bmaggard/luigi,bowlofstew/luigi,samepage-labs/luigi,fabriziodemaria/luigi,pkexcellent/luigi,kevhill/luigi,dkroy/luigi,JackDanger/luigi,walkers-mv/luigi,ehdr/luigi,LamCiuLoeng/luigi,samepage-labs/luigi,spotify/luigi,ViaSat/luigi,tuulos/luigi,ThQ/luigi,wakamori/luigi,moandcompany/luigi,mbruggmann/luigi,belevtsoff/luigi,neilisaac/luigi,slvnperron/luigi,anyman/luigi,realgo/luigi,ThQ/luigi,ChrisBeaumont/luigi,ThQ/luigi,graingert/luigi,rizzatti/luigi,ZhenxingWu/luigi,torypages/luigi,jamesmcm/luigi,glenndmello/luigi,meyerson/luigi,ChrisBeaumont/luigi,kalaidin/luigi,rizzatti/luigi,mbruggmann/luigi,linearregression/luigi,springcoil/luigi,gpoulin/luigi,huiyi1990/luigi,aeron15/luigi,stephenpascoe/luigi,rayrrr/luigi,aeron15/luigi,lichia/luigi,linearregression/luigi,penelopy/luigi,hadesbox/luigi,ContextLogic/luigi,hellais/luigi,SkyTruth/luigi,theoryno3/luigi,fabriziodemaria/luigi,pkexcellent/luigi,Houzz/luigi,anyman/luigi,bowlofstew/luigi,jw0201/luigi,kalaidin/luigi,moritzschaefer/luigi,joeshaw/luigi,mfcabrera/luigi,ViaSat/luigi,rizzatti/luigi,dlstadther/luigi,thejens/luigi,gpoulin/luigi,mfcabrera/luigi,penelopy/luigi,thejens/luigi,lungetech/luigi,h3biomed/luigi,bmaggard/luigi
,fabriziodemaria/luigi,ThQ/luigi,ivannotes/luigi,penelopy/luigi,casey-green/luigi,walkers-mv/luigi,hadesbox/luigi,ChrisBeaumont/luigi,kalaidin/luigi,sahitya-pavurala/luigi,hellais/luigi,laserson/luigi,gpoulin/luigi,altaf-ali/luigi,lichia/luigi,anyman/luigi,stephenpascoe/luigi,meyerson/luigi,lichia/luigi,ViaSat/luigi,Yoone/luigi,walkers-mv/luigi,joeshaw/luigi,realgo/luigi,republic-analytics/luigi,stroykova/luigi,Tarrasch/luigi,vine/luigi,samuell/luigi,foursquare/luigi,JackDanger/luigi,percyfal/luigi,tuulos/luigi,ContextLogic/luigi,upworthy/luigi,jw0201/luigi,stroykova/luigi,Yoone/luigi,lungetech/luigi,pkexcellent/luigi,humanlongevity/luigi,hellais/luigi,Houzz/luigi,lungetech/luigi,Dawny33/luigi,humanlongevity/luigi,hellais/luigi,ZhenxingWu/luigi,hadesbox/luigi,SkyTruth/luigi,dylanjbarth/luigi,edx/luigi,ehdr/luigi,glenndmello/luigi,PeteW/luigi,qpxu007/luigi,harveyxia/luigi,percyfal/luigi,drincruz/luigi,ehdr/luigi,sahitya-pavurala/luigi,SkyTruth/luigi,ZhenxingWu/luigi,huiyi1990/luigi,javrasya/luigi,penelopy/luigi,joeshaw/luigi,dstandish/luigi,riga/luigi,bowlofstew/luigi,foursquare/luigi,mfcabrera/luigi,dhruvg/luigi,glenndmello/luigi,bmaggard/luigi,Tarrasch/luigi,fw1121/luigi,samuell/luigi,moritzschaefer/luigi,javrasya/luigi,riga/luigi,fabriziodemaria/luigi,DomainGroupOSS/luigi,SkyTruth/luigi,neilisaac/luigi,leafjungle/luigi,dkroy/luigi,realgo/luigi,bmaggard/luigi,wakamori/luigi,qpxu007/luigi,dstandish/luigi,casey-green/luigi,dylanjbarth/luigi,republic-analytics/luigi,drincruz/luigi,vine/luigi,linsomniac/luigi,LamCiuLoeng/luigi,graingert/luigi,PeteW/luigi,riga/luigi,DomainGroupOSS/luigi,altaf-ali/luigi,Wattpad/luigi,JackDanger/luigi,javrasya/luigi,casey-green/luigi,fw1121/luigi,Wattpad/luigi,slvnperron/luigi,stephenpascoe/luigi,harveyxia/luigi,adaitche/luigi,springcoil/luigi,leafjungle/luigi,Houzz/luigi,ChrisBeaumont/luigi,dhruvg/luigi,jamesmcm/luigi,casey-green/luigi,stroykova/luigi,samepage-labs/luigi,moandcompany/luigi,torypages/luigi,h3biomed/luigi,samepage-labs/lui
gi,lichia/luigi,huiyi1990/luigi,dkroy/luigi,altaf-ali/luigi,tuulos/luigi,kevhill/luigi,javrasya/luigi,LamCiuLoeng/luigi,SeedScientific/luigi,meyerson/luigi,samuell/luigi,rizzatti/luigi,ContextLogic/luigi,foursquare/luigi,laserson/luigi,humanlongevity/luigi,theoryno3/luigi,Dawny33/luigi,neilisaac/luigi,SeedScientific/luigi,belevtsoff/luigi,belevtsoff/luigi,percyfal/luigi,mfcabrera/luigi,mbruggmann/luigi,realgo/luigi,vine/luigi,moandcompany/luigi,belevtsoff/luigi,wakamori/luigi,adaitche/luigi,17zuoye/luigi,linsomniac/luigi,meyerson/luigi,spotify/luigi,torypages/luigi,foursquare/luigi,dhruvg/luigi,DomainGroupOSS/luigi,laserson/luigi,jamesmcm/luigi,Magnetic/luigi,spotify/luigi,tuulos/luigi,fw1121/luigi,ivannotes/luigi,soxofaan/luigi,Yoone/luigi,qpxu007/luigi,gpoulin/luigi,17zuoye/luigi,harveyxia/luigi,hadesbox/luigi,h3biomed/luigi,rayrrr/luigi,17zuoye/luigi,alkemics/luigi,linearregression/luigi,ivannotes/luigi,riga/luigi,Magnetic/luigi,upworthy/luigi,theoryno3/luigi,torypages/luigi,harveyxia/luigi,SeedScientific/luigi,adaitche/luigi,slvnperron/luigi,dkroy/luigi,samuell/luigi,qpxu007/luigi,oldpa/luigi,edx/luigi,upworthy/luigi,linearregression/luigi,Dawny33/luigi,thejens/luigi,Wattpad/luigi,bowlofstew/luigi,republic-analytics/luigi,slvnperron/luigi,percyfal/luigi,edx/luigi,upworthy/luigi,dylanjbarth/luigi,pkexcellent/luigi,leafjungle/luigi,graingert/luigi,soxofaan/luigi,kevhill/luigi,springcoil/luigi,rayrrr/luigi,stephenpascoe/luigi,17zuoye/luigi,theoryno3/luigi,drincruz/luigi,thejens/luigi,rayrrr/luigi,jamesmcm/luigi,Magnetic/luigi,ehdr/luigi,Dawny33/luigi,stroykova/luigi,ivannotes/luigi,sahitya-pavurala/luigi,drincruz/luigi,republic-analytics/luigi,Tarrasch/luigi,ContextLogic/luigi,ZhenxingWu/luigi,neilisaac/luigi,dlstadther/luigi,oldpa/luigi,altaf-ali/luigi,oldpa/luigi,anyman/luigi,joeshaw/luigi,edx/luigi,PeteW/luigi | ---
+++
@@ -2,9 +2,42 @@
import unittest
import luigi.task
+import luigi
+from datetime import datetime, timedelta
+
+
+class DummyTask(luigi.Task):
+
+ param = luigi.Parameter()
+ bool_param = luigi.BooleanParameter()
+ int_param = luigi.IntParameter()
+ float_param = luigi.FloatParameter()
+ date_param = luigi.DateParameter()
+ datehour_param = luigi.DateHourParameter()
+ timedelta_param = luigi.TimeDeltaParameter()
+ list_param = luigi.Parameter(is_list=True)
class TaskTest(unittest.TestCase):
-
- def test_tasks_doctest(self):
- doctest.testmod(luigi.task)
+
+ def test_tasks_doctest(self):
+ doctest.testmod(luigi.task)
+
+ def test_task_to_str_to_task(self):
+ params = dict(
+ param='test',
+ bool_param=True,
+ int_param=666,
+ float_param=123.456,
+ date_param=datetime(2014, 9, 13).date(),
+ datehour_param=datetime(2014, 9, 13, 9),
+ timedelta_param=timedelta(44), # doesn't support seconds
+ list_param=['in', 'flames'])
+
+ original = DummyTask(**params)
+ other = DummyTask.from_str_params(original.to_str_params(), {})
+ self.assertEqual(original, other)
+
+
+if __name__ == '__main__':
+ unittest.main() |
b0202e8882f792feb041070baff7370cacf73751 | tests/test_api.py | tests/test_api.py | # -*- coding: utf-8 -*-
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
| # -*- coding: utf-8 -*-
import subprocess
import time
from unittest import TestCase
from nose.tools import assert_equal
class TestOldApi(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
class TestNewApi(TestCase):
def setUp(self):
self.process = subprocess.Popen(['openfisca', 'serve'])
def tearDown(self):
self.process.terminate()
def test_response(self):
try:
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s")
| Test france compatibility with the new API | Test france compatibility with the new API
| Python | agpl-3.0 | antoinearnoud/openfisca-france,sgmap/openfisca-france,sgmap/openfisca-france,antoinearnoud/openfisca-france | ---
+++
@@ -19,3 +19,18 @@
subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:2000', '--output-document=/dev/null'])
except subprocess.CalledProcessError:
raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:2000 after 10s")
+
+
+class TestNewApi(TestCase):
+
+ def setUp(self):
+ self.process = subprocess.Popen(['openfisca', 'serve'])
+
+ def tearDown(self):
+ self.process.terminate()
+
+ def test_response(self):
+ try:
+ subprocess.check_call(['wget', '--quiet', '--retry-connrefused', '--waitretry=1', '--tries=10', 'http://localhost:6000/parameters', '--output-document=/dev/null'])
+ except subprocess.CalledProcessError:
+ raise subprocess.CalledProcessError("Could not reach OpenFisca Web API at localhost:6000 after 10s") |
53f2e3e5b58b001743bdedb479697150a9205b3f | buffpy/tests/test_profiles_manager.py | buffpy/tests/test_profiles_manager.py | from nose.tools import eq_
from mock import MagicMock, patch
from buffpy.managers.profiles import Profiles
from buffpy.models.profile import Profile, PATHS
mocked_response = {
'name': 'me',
'service': 'twiter',
'id': 1
}
def test_profiles_manager_all_method():
'''
Test basic profiles retrieving
'''
mocked_api = MagicMock()
mocked_api.get.return_value = [{'a':'b'}]
with patch('buffpy.managers.profiles.Profile') as mocked_profile:
mocked_profile.return_value = 1
profiles = Profiles(api=mocked_api).all()
eq_(profiles, [1])
mocked_api.get.assert_called_once_with(url=PATHS['GET_PROFILES'])
mocked_profile.assert_called_once_with(mocked_api, {'a': 'b'})
def test_profiles_manager_filter_method():
'''
Test basic profiles filtering based on some minimal criteria
'''
mocked_api = MagicMock()
profiles = Profiles(mocked_api, [{'a':'b'}, {'a': 'c'}])
eq_(profiles.filter(a='b'), [{'a': 'b'}])
def test_profiles_manager_filter_method_empty():
'''
Test basic profiles filtering when the manager is empty
'''
mocked_api = MagicMock()
mocked_api.get.return_value = [{'a':'b'}, {'a': 'c'}]
profiles = Profiles(api=mocked_api)
eq_(profiles.filter(a='b'), [Profile(mocked_api, {'a': 'b'})])
| from unittest.mock import MagicMock, patch
from buffpy.managers.profiles import Profiles
from buffpy.models.profile import Profile, PATHS
MOCKED_RESPONSE = {
"name": "me",
"service": "twiter",
"id": 1
}
def test_profiles_manager_all_method():
""" Should retrieve profile info. """
mocked_api = MagicMock()
mocked_api.get.return_value = [{"a": "b"}]
with patch("buffpy.managers.profiles.Profile", return_value=1) as mocked_profile:
profiles = Profiles(api=mocked_api).all()
assert profiles == [1]
mocked_api.get.assert_called_once_with(url=PATHS["GET_PROFILES"])
mocked_profile.assert_called_once_with(mocked_api, {"a": "b"})
def test_profiles_manager_filter_method():
""" Should filter based on criteria. """
mocked_api = MagicMock()
profiles = Profiles(mocked_api, [{"a": "b"}, {"a": "c"}])
assert profiles.filter(a="b") == [{"a": "b"}]
def test_profiles_manager_filter_method_empty():
""" Should filter if profile manager is None. """
mocked_api = MagicMock()
mocked_api.get.return_value = [{"a": "b"}, {"a": "c"}]
profiles = Profiles(api=mocked_api)
assert profiles.filter(a="b") == [Profile(mocked_api, {"a": "b"})]
| Migrate profiles manager tests to pytest | Migrate profiles manager tests to pytest
| Python | mit | vtemian/buffpy | ---
+++
@@ -1,52 +1,44 @@
-from nose.tools import eq_
-from mock import MagicMock, patch
+from unittest.mock import MagicMock, patch
from buffpy.managers.profiles import Profiles
from buffpy.models.profile import Profile, PATHS
-mocked_response = {
- 'name': 'me',
- 'service': 'twiter',
- 'id': 1
+
+MOCKED_RESPONSE = {
+ "name": "me",
+ "service": "twiter",
+ "id": 1
}
+
def test_profiles_manager_all_method():
- '''
- Test basic profiles retrieving
- '''
+ """ Should retrieve profile info. """
- mocked_api = MagicMock()
- mocked_api.get.return_value = [{'a':'b'}]
+ mocked_api = MagicMock()
+ mocked_api.get.return_value = [{"a": "b"}]
- with patch('buffpy.managers.profiles.Profile') as mocked_profile:
- mocked_profile.return_value = 1
+ with patch("buffpy.managers.profiles.Profile", return_value=1) as mocked_profile:
+ profiles = Profiles(api=mocked_api).all()
- profiles = Profiles(api=mocked_api).all()
+ assert profiles == [1]
+ mocked_api.get.assert_called_once_with(url=PATHS["GET_PROFILES"])
+ mocked_profile.assert_called_once_with(mocked_api, {"a": "b"})
- eq_(profiles, [1])
- mocked_api.get.assert_called_once_with(url=PATHS['GET_PROFILES'])
- mocked_profile.assert_called_once_with(mocked_api, {'a': 'b'})
def test_profiles_manager_filter_method():
- '''
- Test basic profiles filtering based on some minimal criteria
- '''
+ """ Should filter based on criteria. """
- mocked_api = MagicMock()
+ mocked_api = MagicMock()
+ profiles = Profiles(mocked_api, [{"a": "b"}, {"a": "c"}])
+ assert profiles.filter(a="b") == [{"a": "b"}]
- profiles = Profiles(mocked_api, [{'a':'b'}, {'a': 'c'}])
-
- eq_(profiles.filter(a='b'), [{'a': 'b'}])
def test_profiles_manager_filter_method_empty():
- '''
- Test basic profiles filtering when the manager is empty
- '''
+ """ Should filter if profile manager is None. """
- mocked_api = MagicMock()
- mocked_api.get.return_value = [{'a':'b'}, {'a': 'c'}]
+ mocked_api = MagicMock()
+ mocked_api.get.return_value = [{"a": "b"}, {"a": "c"}]
+ profiles = Profiles(api=mocked_api)
- profiles = Profiles(api=mocked_api)
-
- eq_(profiles.filter(a='b'), [Profile(mocked_api, {'a': 'b'})])
+ assert profiles.filter(a="b") == [Profile(mocked_api, {"a": "b"})] |
bbc0b9cd9244079c14914763e60ec4ca9eb41b4e | byceps/blueprints/admin/site/forms.py | byceps/blueprints/admin/site/forms.py | """
byceps.blueprints.admin.site.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import SelectField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
from ....services.email import service as email_service
from ....services.party import service as party_service
class UpdateForm(LocalizedForm):
title = StringField('Titel', validators=[Length(min=1, max=20)])
server_name = StringField('Servername', validators=[InputRequired()])
email_config_id = SelectField('E-Mail-Konfiguration', validators=[InputRequired()])
party_id = SelectField('Party-ID', validators=[Optional()])
def set_email_config_choices(self):
configs = email_service.get_all_configs()
configs.sort(key=lambda config: config.id)
self.email_config_id.choices = [(c.id, c.id) for c in configs]
def set_party_choices(self):
parties = party_service.get_all_parties()
parties.sort(key=lambda party: party.id)
choices = [(str(p.id), p.title) for p in parties]
choices.insert(0, ('', '<keine>'))
self.party_id.choices = choices
class CreateForm(UpdateForm):
id = StringField('ID', validators=[Length(min=1, max=40)])
| """
byceps.blueprints.admin.site.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import SelectField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
from ....services.email import service as email_service
from ....services.party import service as party_service
class UpdateForm(LocalizedForm):
title = StringField('Titel', validators=[Length(min=1, max=40)])
server_name = StringField('Servername', validators=[InputRequired()])
email_config_id = SelectField('E-Mail-Konfiguration', validators=[InputRequired()])
party_id = SelectField('Party-ID', validators=[Optional()])
def set_email_config_choices(self):
configs = email_service.get_all_configs()
configs.sort(key=lambda config: config.id)
self.email_config_id.choices = [(c.id, c.id) for c in configs]
def set_party_choices(self):
parties = party_service.get_all_parties()
parties.sort(key=lambda party: party.id)
choices = [(str(p.id), p.title) for p in parties]
choices.insert(0, ('', '<keine>'))
self.party_id.choices = choices
class CreateForm(UpdateForm):
id = StringField('ID', validators=[Length(min=1, max=40)])
| Allow site names to be up to 40 chars long (instead of 20) | Allow site names to be up to 40 chars long (instead of 20)
| Python | bsd-3-clause | homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps | ---
+++
@@ -16,7 +16,7 @@
class UpdateForm(LocalizedForm):
- title = StringField('Titel', validators=[Length(min=1, max=20)])
+ title = StringField('Titel', validators=[Length(min=1, max=40)])
server_name = StringField('Servername', validators=[InputRequired()])
email_config_id = SelectField('E-Mail-Konfiguration', validators=[InputRequired()])
party_id = SelectField('Party-ID', validators=[Optional()]) |
1ab04355c0172682e9948847a01b073239d0ae64 | words.py | words.py | """Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word():
"""Get a random word from the wordlist."""
words = []
with open(WORDLIST, 'r') as f:
for word in f:
words.append(word)
return random.choice(words)
def get_random_word_scalable():
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
| """Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
| Use scalable get_word by default | Use scalable get_word by default
| Python | mit | andrewyang96/HangmanGame | ---
+++
@@ -5,21 +5,14 @@
WORDLIST = 'wordlist.txt'
-def get_random_word():
- """Get a random word from the wordlist."""
- words = []
- with open(WORDLIST, 'r') as f:
- for word in f:
- words.append(word)
- return random.choice(words)
-
-
-def get_random_word_scalable():
+def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
+ if len(word) < min_word_length:
+ continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word |
ba4a68871ee326de94203bf401e4d325b87bec9c | docs/conf.py | docs/conf.py | import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2020, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| Update copyright string in docs | Update copyright string in docs
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
| Python | bsd-3-clause | pymanopt/pymanopt,pymanopt/pymanopt | ---
+++
@@ -4,7 +4,7 @@
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
-copyright = "2016-2020, {:s}".format(author)
+copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings |
e051ae3bdada17f31eb1c4ed68bcd41e6e20deab | cea/interfaces/dashboard/api/dashboard.py | cea/interfaces/dashboard/api/dashboard.py | from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
| from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
out = []
for d in dashboards:
dashboard = d.to_dict()
for i, plot in enumerate(dashboard['plots']):
dashboard['plots'][i]['title'] = d.plots[i].title
out.append(dashboard)
return out
| Include plot title to plots | Include plot title to plots
| Python | mit | architecture-building-systems/CEAforArcGIS,architecture-building-systems/CEAforArcGIS | ---
+++
@@ -21,7 +21,11 @@
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
- return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
- 'plots': [{'title': plot.title, 'scenario':
- plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
- for plot in d.plots]} for d in dashboards]
+ out = []
+ for d in dashboards:
+ dashboard = d.to_dict()
+ for i, plot in enumerate(dashboard['plots']):
+ dashboard['plots'][i]['title'] = d.plots[i].title
+ out.append(dashboard)
+
+ return out |
7e9f8ca01b9cb1a70ee09dac9e0eecb8d370ad1f | acq4/devices/FalconTurret/falconturret.py | acq4/devices/FalconTurret/falconturret.py | import falconoptics
from ..FilterWheel import FilterWheel, FilterWheelFuture
class FalconTurret(FilterWheel):
def __init__(self, dm, config, name):
self.dev = falconoptics.Falcon(config_file=None, update_nonvolitile=True)
self.dev.home(block=False)
FilterWheel.__init__(self, dm, config, name)
def getPositionCount(self):
return self.dev._total_slides
def _getPosition(self):
return int(self.dev.current_slide)
def _setPosition(self, pos):
if pos == 'home':
self.dev.home(block=False)
else:
self.dev.move_to_slide(pos, block=False)
return FalconTurretFuture(self, pos)
def home(self):
self.setPosition('home')
def _stop(self):
self.dev.emergency_stop()
def isMoving(self):
return self.dev.is_moving
class FalconTurretFuture(FilterWheelFuture):
def _atTarget(self):
if self.position == 'home':
return self.dev.dev.is_homed
else:
return FilterWheelFuture._atTarget()
| from acq4.pyqtgraph.Qt import QtGui
import falconoptics
from ..FilterWheel import FilterWheel, FilterWheelFuture
class FalconTurret(FilterWheel):
def __init__(self, dm, config, name):
self.dev = falconoptics.Falcon(config_file=None, update_nonvolitile=True)
self.dev.home(block=False)
FilterWheel.__init__(self, dm, config, name)
def getPositionCount(self):
return self.dev._total_slides
def _getPosition(self):
return int(self.dev.current_slide)
def _setPosition(self, pos):
if pos == 'home':
self.dev.home(block=False)
else:
self.dev.move_to_slide(pos, block=False)
return FalconTurretFuture(self, pos)
def home(self):
"""Search for home position on turret; used to recalibrate wheel location.
"""
self.setPosition('home')
def _stop(self):
self.dev.emergency_stop()
def isMoving(self):
return self.dev.is_moving
class FalconTurretFuture(FilterWheelFuture):
def _atTarget(self):
if self.position == 'home':
return self.dev.dev.is_homed
else:
return FilterWheelFuture._atTarget()
class FalconDevGui(FilterWheelDevGui):
def __init__(self, dev):
FilterWheelDevGui.__init__(self, dev)
self.homeBtn = QtGui.QPushButton("Find Home")
self.homeBtn.clicked.connect(self.dev.home)
| Add home button to falcon turret dev gui | Add home button to falcon turret dev gui
| Python | mit | campagnola/acq4,acq4/acq4,pbmanis/acq4,pbmanis/acq4,meganbkratz/acq4,acq4/acq4,pbmanis/acq4,campagnola/acq4,campagnola/acq4,meganbkratz/acq4,pbmanis/acq4,meganbkratz/acq4,campagnola/acq4,acq4/acq4,acq4/acq4,meganbkratz/acq4 | ---
+++
@@ -1,3 +1,4 @@
+from acq4.pyqtgraph.Qt import QtGui
import falconoptics
from ..FilterWheel import FilterWheel, FilterWheelFuture
@@ -23,6 +24,8 @@
return FalconTurretFuture(self, pos)
def home(self):
+ """Search for home position on turret; used to recalibrate wheel location.
+ """
self.setPosition('home')
def _stop(self):
@@ -38,3 +41,11 @@
return self.dev.dev.is_homed
else:
return FilterWheelFuture._atTarget()
+
+
+class FalconDevGui(FilterWheelDevGui):
+ def __init__(self, dev):
+ FilterWheelDevGui.__init__(self, dev)
+
+ self.homeBtn = QtGui.QPushButton("Find Home")
+ self.homeBtn.clicked.connect(self.dev.home) |
3e617e3ade1fa55562868c2e2bf8bc07f9b09a79 | skflow/tests/test_io.py | skflow/tests/test_io.py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from sklearn import datasets
from sklearn.metrics import accuracy_score, mean_squared_error, log_loss
import tensorflow as tf
from tensorflow.python.platform import googletest
from skflow.io import *
import skflow
class BaseTest(googletest.TestCase):
def test_pandas_data_classification(self):
if HAS_PANDAS:
random.seed(42)
iris = datasets.load_iris()
data = DataFrame(iris.data)
labels = DataFrame(iris.target)
classifier = skflow.TensorFlowLinearClassifier(n_classes=3)
classifier.fit(data, labels)
score = accuracy_score(classifier.predict(data), labels)
self.assertGreater(score, 0.5, "Failed with score = {0}".format(score))
if __name__ == '__main__':
tf.test.main()
| # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from sklearn import datasets
from sklearn.metrics import accuracy_score
import tensorflow as tf
from tensorflow.python.platform import googletest
from skflow.io import *
import skflow
class BaseTest(googletest.TestCase):
def test_pandas_data_classification(self):
if HAS_PANDAS:
random.seed(42)
iris = datasets.load_iris()
data = DataFrame(iris.data)
labels = DataFrame(iris.target)
classifier = skflow.TensorFlowLinearClassifier(n_classes=3)
classifier.fit(data, labels)
score = accuracy_score(classifier.predict(data), labels)
self.assertGreater(score, 0.5, "Failed with score = {0}".format(score))
else:
print("No pandas installed. test_pandas_data_classification skipped.")
if __name__ == '__main__':
tf.test.main()
| Print when pandas not installed and removed unnecessary imports | Print when pandas not installed and removed unnecessary imports
| Python | apache-2.0 | anand-c-goog/tensorflow,alheinecke/tensorflow-xsmm,sandeepdsouza93/TensorFlow-15712,alisidd/tensorflow,paolodedios/tensorflow,kevin-coder/tensorflow-fork,sjperkins/tensorflow,yanchen036/tensorflow,with-git/tensorflow,benoitsteiner/tensorflow,kobejean/tensorflow,wangyum/tensorflow,Moriadry/tensorflow,rdipietro/tensorflow,Xeralux/tensorflow,anilmuthineni/tensorflow,ibmsoe/tensorflow,cancan101/tensorflow,xodus7/tensorflow,nikste/tensorflow,dhalleine/tensorflow,ZhangXinNan/tensorflow,jwlawson/tensorflow,thjashin/tensorflow,elingg/tensorflow,zasdfgbnm/tensorflow,ppwwyyxx/tensorflow,HKUST-SING/tensorflow,mavenlin/tensorflow,panmari/tensorflow,thesuperzapper/tensorflow,ppries/tensorflow,sandeepdsouza93/TensorFlow-15712,benoitsteiner/tensorflow,LUTAN/tensorflow,AndreasMadsen/tensorflow,nolanliou/tensorflow,pierreg/tensorflow,karllessard/tensorflow,ivano666/tensorflow,seaotterman/tensorflow,laszlocsomor/tensorflow,horance-liu/tensorflow,awni/tensorflow,sjperkins/tensorflow,haeusser/tensorflow,AnishShah/tensorflow,jhaux/tensorflow,seanli9jan/tensorflow,ArtsiomCh/tensorflow,freedomtan/tensorflow,EvenStrangest/tensorflow,annarev/tensorflow,jhaux/tensorflow,adamtiger/tensorflow,anilmuthineni/tensorflow,aldian/tensorflow,dendisuhubdy/tensorflow,jhaux/tensorflow,adamtiger/tensorflow,code-sauce/tensorflow,maciekcc/tensorflow,Intel-tensorflow/tensorflow,lakshayg/tensorflow,cg31/tensorflow,zycdragonball/tensorflow,gautam1858/tensorflow,mixturemodel-flow/tensorflow,calebfoss/tensorflow,tntnatbry/tensorflow,eaplatanios/tensorflow,taknevski/tensorflow-xsmm,av8ramit/tensorflow,jart/tensorflow,girving/tensorflow,nanditav/15712-TensorFlow,tensorflow/tensorflow,admcrae/tensorflow,markslwong/tensorflow,alistairlow/tensorflow,xzturn/tensorflow,TakayukiSakai/tensorflow,eaplatanios/tensorflow,kamcpp/tensorflow,dancingdan/tensorflow,alisidd/tensorflow,annarev/tensorflow,nburn42/tensorflow,DavidNorman/tensorflow,peterbraden/tensorflow,cg31/tensorflow,ppwwyyxx/tensorflow,un
siloai/syntaxnet-ops-hack,dancingdan/tensorflow,cancan101/tensorflow,aselle/tensorflow,ppwwyyxx/tensorflow,AnishShah/tensorflow,aldian/tensorflow,DavidNorman/tensorflow,dyoung418/tensorflow,Kongsea/tensorflow,andrewcmyers/tensorflow,kobejean/tensorflow,chris-chris/tensorflow,calebfoss/tensorflow,paolodedios/tensorflow,Kongsea/tensorflow,xzturn/tensorflow,nikste/tensorflow,gautam1858/tensorflow,gunan/tensorflow,ran5515/DeepDecision,kamcpp/tensorflow,zycdragonball/tensorflow,Bismarrck/tensorflow,xodus7/tensorflow,ArtsiomCh/tensorflow,dancingdan/tensorflow,ZhangXinNan/tensorflow,code-sauce/tensorflow,yanchen036/tensorflow,tongwang01/tensorflow,seaotterman/tensorflow,alistairlow/tensorflow,brchiu/tensorflow,HKUST-SING/tensorflow,neilhan/tensorflow,MoamerEncsConcordiaCa/tensorflow,lukeiwanski/tensorflow-opencl,calebfoss/tensorflow,apark263/tensorflow,nolanliou/tensorflow,nightjean/Deep-Learning,nburn42/tensorflow,laszlocsomor/tensorflow,eaplatanios/tensorflow,ychfan/tensorflow,scenarios/tensorflow,alsrgv/tensorflow,ibmsoe/tensorflow,thesuperzapper/tensorflow,jhaux/tensorflow,alistairlow/tensorflow,LUTAN/tensorflow,dancingdan/tensorflow,asadziach/tensorflow,aam-at/tensorflow,Bulochkin/tensorflow_pack,tornadozou/tensorflow,jeffzheng1/tensorflow,AndreasMadsen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,thjashin/tensorflow,sjperkins/tensorflow,alsrgv/tensorflow,aselle/tensorflow,xzturn/tensorflow,mortada/tensorflow,JVillella/tensorflow,taknevski/tensorflow-xsmm,chemelnucfin/tensorflow,odejesush/tensorflow,thjashin/tensorflow,dongjoon-hyun/tensorflow,sandeepgupta2k4/tensorflow,wangyum/tensorflow,Mistobaan/tensorflow,hehongliang/tensorflow,ravindrapanda/tensorflow,brchiu/tensorflow,dyoung418/tensorflow,seaotterman/tensorflow,nikste/tensorflow,AndreasMadsen/tensorflow,JingJunYin/tensorflow,kobejean/tensorflow,hehongliang/tensorflow,av8ramit/tensorflow,HKUST-SING/tensorflow,vrv/tensorflow,benoitsteiner/tensorflow-xsmm,theflofly/tensorflow,JingJunYin/tensorflow,SnakeJenn
y/TensorFlow,benoitsteiner/tensorflow-xsmm,asimshankar/tensorflow,JVillella/tensorflow,yongtang/tensorflow,mortada/tensorflow,mengxn/tensorflow,tomasreimers/tensorflow-emscripten,aselle/tensorflow,snnn/tensorflow,tongwang01/tensorflow,peterbraden/tensorflow,allenlavoie/tensorflow,karllessard/tensorflow,Bulochkin/tensorflow_pack,thjashin/tensorflow,wchan/tensorflow,anand-c-goog/tensorflow,codrut3/tensorflow,MycChiu/tensorflow,Xeralux/tensorflow,jendap/tensorflow,laszlocsomor/tensorflow,ville-k/tensorflow,TakayukiSakai/tensorflow,tornadozou/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,chenjun0210/tensorflow,manjunaths/tensorflow,tomasreimers/tensorflow-emscripten,kevin-coder/tensorflow-fork,eerwitt/tensorflow,jalexvig/tensorflow,pierreg/tensorflow,scenarios/tensorflow,alivecor/tensorflow,eaplatanios/tensorflow,alsrgv/tensorflow,arborh/tensorflow,ghchinoy/tensorflow,AndreasMadsen/tensorflow,jhseu/tensorflow,lukeiwanski/tensorflow,aam-at/tensorflow,anilmuthineni/tensorflow,cg31/tensorflow,nightjean/Deep-Learning,pavelchristof/gomoku-ai,elingg/tensorflow,alshedivat/tensorflow,gautam1858/tensorflow,sandeepgupta2k4/tensorflow,jhseu/tensorflow,ravindrapanda/tensorflow,ghchinoy/tensorflow,taknevski/tensorflow-xsmm,mdrumond/tensorflow,davidzchen/tensorflow,codrut3/tensorflow,nightjean/Deep-Learning,Moriadry/tensorflow,DavidNorman/tensorflow,martinwicke/tensorflow,xodus7/tensorflow,benoitsteiner/tensorflow-opencl,handroissuazo/tensorflow,DCSaunders/tensorflow,juharris/tensorflow,Intel-Corporation/tensorflow,theflofly/tensorflow,HaebinShin/tensorflow,xzturn/tensorflow,ppries/tensorflow,jostep/tensorflow,ibab/tensorflow,tensorflow/tensorflow-pywrap_saved_model,anand-c-goog/tensorflow,zasdfgbnm/tensorflow,LUTAN/tensorflow,ran5515/DeepDecision,benoitsteiner/tensorflow-opencl,nanditav/15712-TensorFlow,seanli9jan/tensorflow,zycdragonball/tensorflow,elingg/tensorflow,apark263/tensorflow,RapidApplicationDevelopment/tensorflow,bowang/tensorflow,adamtiger/tensorflow,ghchinoy/
tensorflow,sandeepgupta2k4/tensorflow,krikru/tensorflow-opencl,theflofly/tensorflow,hehongliang/tensorflow,chris-chris/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alheinecke/tensorflow-xsmm,hehongliang/tensorflow,ivano666/tensorflow,tillahoffmann/tensorflow,kamcpp/tensorflow,pierreg/tensorflow,apark263/tensorflow,Intel-tensorflow/tensorflow,Xeralux/tensorflow,bowang/tensorflow,DavidNorman/tensorflow,RapidApplicationDevelopment/tensorflow,brchiu/tensorflow,dansbecker/skflow,ninotoshi/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,juharris/tensorflow,laszlocsomor/tensorflow,ppwwyyxx/tensorflow,asadziach/tensorflow,alsrgv/tensorflow,nolanliou/tensorflow,ychfan/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,ZhangXinNan/tensorflow,cg31/tensorflow,tiagofrepereira2012/tensorflow,sandeepgupta2k4/tensorflow,ghchinoy/tensorflow,alisidd/tensorflow,XueqingLin/tensorflow,ychfan/tensorflow,Mistobaan/tensorflow,krikru/tensorflow-opencl,kevin-coder/tensorflow-fork,xzturn/tensorflow,jhaux/tensorflow,aam-at/tensorflow,pierreg/tensorflow,brchiu/tensorflow,lukeiwanski/tensorflow,guschmue/tensorflow,benoitsteiner/tensorflow-xsmm,jart/tensorflow,gunan/tensorflow,jart/tensorflow,krikru/tensorflow-opencl,guschmue/tensorflow,jhseu/tensorflow,a-doumoulakis/tensorflow,gibiansky/tensorflow,Moriadry/tensorflow,abhitopia/tensorflow,manipopopo/tensorflow,ivano666/tensorflow,sarvex/tensorflow,ibmsoe/tensorflow,anilmuthineni/tensorflow,zycdragonball/tensorflow,andrewcmyers/tensorflow,yufengg/tensorflow,suiyuan2009/tensorflow,petewarden/tensorflow,hfp/tensorflow-xsmm,alivecor/tensorflow,girving/tensorflow,mdrumond/tensorflow,eaplatanios/tensorflow,chris-chris/tensorflow,allenlavoie/tensorflow,admcrae/tensorflow,Bismarrck/tensorflow,renyi533/tensorflow,strint/tensorflow,drpngx/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,manazhao/tf_recsys,tillahoffmann/tensorflow,TakayukiSakai/tensorflow,caisq/tensorflow,awni/tensorflow,av8ramit/tensorflow,ppwwyyxx/tenso
rflow,Mistobaan/tensorflow,ZhangXinNan/tensorflow,allenlavoie/tensorflow,arborh/tensorflow,DCSaunders/tensorflow,av8ramit/tensorflow,av8ramit/tensorflow,mdrumond/tensorflow,wchan/tensorflow,apark263/tensorflow,mavenlin/tensorflow,Bismarrck/tensorflow,andrewcmyers/tensorflow,pcm17/tensorflow,theflofly/tensorflow,kamcpp/tensorflow,tornadozou/tensorflow,lukeiwanski/tensorflow,theflofly/tensorflow,annarev/tensorflow,HaebinShin/tensorflow,sarvex/tensorflow,ivano666/tensorflow,ghchinoy/tensorflow,benoitsteiner/tensorflow-xsmm,ZhangXinNan/tensorflow,hehongliang/tensorflow,karllessard/tensorflow,ninotoshi/tensorflow,yongtang/tensorflow,eaplatanios/tensorflow,codrut3/tensorflow,drpngx/tensorflow,aselle/tensorflow,yaroslavvb/tensorflow,AndreasMadsen/tensorflow,xodus7/tensorflow,tensorflow/tensorflow,ppwwyyxx/tensorflow,frreiss/tensorflow-fred,ville-k/tensorflow,Bismarrck/tensorflow,cxxgtxy/tensorflow,rabipanda/tensorflow,Moriadry/tensorflow,kamcpp/tensorflow,kevin-coder/tensorflow-fork,maciekcc/tensorflow,hfp/tensorflow-xsmm,suiyuan2009/tensorflow,markslwong/tensorflow,krikru/tensorflow-opencl,jbedorf/tensorflow,meteorcloudy/tensorflow,lukeiwanski/tensorflow,MostafaGazar/tensorflow,lukeiwanski/tensorflow,seaotterman/tensorflow,elingg/tensorflow,laosiaudi/tensorflow,paolodedios/tensorflow,wangyum/tensorflow,mrry/tensorflow,benoitsteiner/tensorflow,kamcpp/tensorflow,jostep/tensorflow,admcrae/tensorflow,TakayukiSakai/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,nburn42/tensorflow,lakshayg/tensorflow,nburn42/tensorflow,krikru/tensorflow-opencl,johndpope/tensorflow,ibmsoe/tensorflow,Bulochkin/tensorflow_pack,TakayukiSakai/tensorflow,manazhao/tf_recsys,drpngx/tensorflow,taknevski/tensorflow-xsmm,jendap/tensorflow,Xeralux/tensorflow,ville-k/tensorflow,jbedorf/tensorflow,yaroslavvb/tensorflow,ibab/tensorflow,ArtsiomCh/tensorflow,ville-k/tensorflow,petewarden/tensorflow,calebfoss/tensorflow,jart/tensorflow,scenarios/tensorflow,unsiloai/syntaxnet-ops-hack,laosiaudi/tensorflow,Jing
JunYin/tensorflow,sandeepgupta2k4/tensorflow,girving/tensorflow,Bulochkin/tensorflow_pack,ppwwyyxx/tensorflow,anand-c-goog/tensorflow,petewarden/tensorflow_makefile,sandeepgupta2k4/tensorflow,jalexvig/tensorflow,neilhan/tensorflow,aam-at/tensorflow,gnieboer/tensorflow,pavelchristof/gomoku-ai,ninotoshi/tensorflow,MostafaGazar/tensorflow,xzturn/tensorflow,guschmue/tensorflow,XueqingLin/tensorflow,kevin-coder/tensorflow-fork,tiagofrepereira2012/tensorflow,meteorcloudy/tensorflow,mrry/tensorflow,horance-liu/tensorflow,xzturn/tensorflow,chemelnucfin/tensorflow,DCSaunders/tensorflow,ninotoshi/tensorflow,vrv/tensorflow,strint/tensorflow,cxxgtxy/tensorflow,manjunaths/tensorflow,ArtsiomCh/tensorflow,snnn/tensorflow,scenarios/tensorflow,tomasreimers/tensorflow-emscripten,alivecor/tensorflow,neilhan/tensorflow,whn09/tensorflow,asimshankar/tensorflow,benoitsteiner/tensorflow-xsmm,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,asimshankar/tensorflow,adit-chandra/tensorflow,strint/tensorflow,gunan/tensorflow,chenjun0210/tensorflow,gojira/tensorflow,yanchen036/tensorflow,awni/tensorflow,Bulochkin/tensorflow_pack,wchan/tensorflow,yaroslavvb/tensorflow,benoitsteiner/tensorflow-opencl,tensorflow/tensorflow-pywrap_tf_optimizer,jhseu/tensorflow,whn09/tensorflow,jwlawson/tensorflow,cancan101/tensorflow,av8ramit/tensorflow,asimshankar/tensorflow,annarev/tensorflow,aam-at/tensorflow,brchiu/tensorflow,tornadozou/tensorflow,lukeiwanski/tensorflow-opencl,gunan/tensorflow,aldian/tensorflow,mdrumond/tensorflow,with-git/tensorflow,rabipanda/tensorflow,benoitsteiner/tensorflow-xsmm,ran5515/DeepDecision,aldian/tensorflow,nightjean/Deep-Learning,davidzchen/tensorflow,seanli9jan/tensorflow,ppries/tensorflow,brchiu/tensorflow,ibmsoe/tensorflow,ppwwyyxx/tensorflow,theflofly/tensorflow,zycdragonball/tensorflow,jbedorf/tensorflow,paolodedios/tensorflow,chemelnucfin/tensorflow,with-git/tensorflow,jhaux/tensorflow,pcm17/tensorflow,ibmsoe/tensorflow,lukeiwanski/tensorflow-opencl,tensorflo
w/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,davidzchen/tensorflow,rdipietro/tensorflow,jwlawson/tensorflow,martinwicke/tensorflow,cxxgtxy/tensorflow,girving/tensorflow,anilmuthineni/tensorflow,allenlavoie/tensorflow,bowang/tensorflow,ravindrapanda/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,apark263/tensorflow,eerwitt/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,mrry/tensorflow,chemelnucfin/tensorflow,jendap/tensorflow,alisidd/tensorflow,meteorcloudy/tensorflow,av8ramit/tensorflow,Mazecreator/tensorflow,mixturemodel-flow/tensorflow,gautam1858/tensorflow,hfp/tensorflow-xsmm,johndpope/tensorflow,mixturemodel-flow/tensorflow,haeusser/tensorflow,EvenStrangest/tensorflow,yaroslavvb/tensorflow,lukeiwanski/tensorflow,Kongsea/tensorflow,gnieboer/tensorflow,juharris/tensorflow,guschmue/tensorflow,drpngx/tensorflow,mengxn/tensorflow,asimshankar/tensorflow,aselle/tensorflow,krikru/tensorflow-opencl,manipopopo/tensorflow,rdipietro/tensorflow,jwlawson/tensorflow,arborh/tensorflow,kamcpp/tensorflow,benoitsteiner/tensorflow-xsmm,benoitsteiner/tensorflow-xsmm,adit-chandra/tensorflow,lukeiwanski/tensorflow,renyi533/tensorflow,ageron/tensorflow,AnishShah/tensorflow,kobejean/tensorflow,cg31/tensorflow,nikste/tensorflow,davidzchen/tensorflow,mavenlin/tensorflow,nanditav/15712-TensorFlow,ychfan/tensorflow,JingJunYin/tensorflow,tntnatbry/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-pywrap_saved_model,eadgarchen/tensorflow,JingJunYin/tensorflow,freedomtan/tensorflow,unsiloai/syntaxnet-ops-hack,alisidd/tensorflow,MoamerEncsConcordiaCa/tensorflow,Mistobaan/tensorflow,benoitsteiner/tensorflow-opencl,alheinecke/tensorflow-xsmm,awni/tensorflow,chenjun0210/tensorflow,Bismarrck/tensorflow,ravindrapanda/tensorflow,Mistobaan/tensorflow,karllessard/tensorflow,alivecor/tensorflow,alsrgv/tensorflow,JVillella/tensorflow,horance-liu/tensorflow,jalexvig/tensorflow,ArtsiomCh/tensorflow,ivano666/tensorflow,kchodorow/tensor
flow,eadgarchen/tensorflow,RapidApplicationDevelopment/tensorflow,haeusser/tensorflow,dongjoon-hyun/tensorflow,yanchen036/tensorflow,jostep/tensorflow,HKUST-SING/tensorflow,meteorcloudy/tensorflow,peterbraden/tensorflow,calebfoss/tensorflow,calebfoss/tensorflow,Intel-Corporation/tensorflow,ghchinoy/tensorflow,ville-k/tensorflow,codrut3/tensorflow,chemelnucfin/tensorflow,DavidNorman/tensorflow,adit-chandra/tensorflow,snnn/tensorflow,ZhangXinNan/tensorflow,hfp/tensorflow-xsmm,horance-liu/tensorflow,asimshankar/tensorflow,guschmue/tensorflow,memo/tensorflow,karllessard/tensorflow,neilhan/tensorflow,strint/tensorflow,abhitopia/tensorflow,alshedivat/tensorflow,kobejean/tensorflow,DCSaunders/tensorflow,theflofly/tensorflow,laszlocsomor/tensorflow,chenjun0210/tensorflow,gibiansky/tensorflow,nburn42/tensorflow,handroissuazo/tensorflow,dongjoon-hyun/tensorflow,alheinecke/tensorflow-xsmm,aselle/tensorflow,Intel-Corporation/tensorflow,admcrae/tensorflow,thjashin/tensorflow,sarvex/tensorflow,aselle/tensorflow,tillahoffmann/tensorflow,ageron/tensorflow,manjunaths/tensorflow,with-git/tensorflow,nolanliou/tensorflow,aselle/tensorflow,juharris/tensorflow,tongwang01/tensorflow,jeffzheng1/tensorflow,taknevski/tensorflow-xsmm,markslwong/tensorflow,dongjoon-hyun/tensorflow,paolodedios/tensorflow,Xeralux/tensorflow,dancingdan/tensorflow,seaotterman/tensorflow,nanditav/15712-TensorFlow,tntnatbry/tensorflow,adamtiger/tensorflow,jalexvig/tensorflow,ppwwyyxx/tensorflow,dancingdan/tensorflow,chris-chris/tensorflow,Mazecreator/tensorflow,arborh/tensorflow,laszlocsomor/tensorflow,code-sauce/tensorflow,tntnatbry/tensorflow,anilmuthineni/tensorflow,Bulochkin/tensorflow_pack,DavidNorman/tensorflow,Kongsea/tensorflow,haeusser/tensorflow,dendisuhubdy/tensorflow,anand-c-goog/tensorflow,alistairlow/tensorflow,dendisuhubdy/tensorflow,odejesush/tensorflow,ninotoshi/tensorflow,HKUST-SING/tensorflow,aam-at/tensorflow,strint/tensorflow,juharris/tensorflow,yanchen036/tensorflow,seanli9jan/tensorflow,peterb
raden/tensorflow,wchan/tensorflow,MycChiu/tensorflow,naturali/tensorflow,anilmuthineni/tensorflow,MostafaGazar/tensorflow,Intel-tensorflow/tensorflow,nolanliou/tensorflow,Mistobaan/tensorflow,pavelchristof/gomoku-ai,mixturemodel-flow/tensorflow,theflofly/tensorflow,lakshayg/tensorflow,jhaux/tensorflow,lukeiwanski/tensorflow-opencl,Mistobaan/tensorflow,jostep/tensorflow,strint/tensorflow,arborh/tensorflow,zasdfgbnm/tensorflow,MostafaGazar/tensorflow,gojira/tensorflow,kevin-coder/tensorflow-fork,chemelnucfin/tensorflow,mavenlin/tensorflow,hehongliang/tensorflow,davidzchen/tensorflow,brchiu/tensorflow,kchodorow/tensorflow,nightjean/Deep-Learning,anilmuthineni/tensorflow,jalexvig/tensorflow,yaroslavvb/tensorflow,cxxgtxy/tensorflow,taknevski/tensorflow-xsmm,jbedorf/tensorflow,anand-c-goog/tensorflow,haeusser/tensorflow,MoamerEncsConcordiaCa/tensorflow,jwlawson/tensorflow,pavelchristof/gomoku-ai,petewarden/tensorflow,neilhan/tensorflow,lakshayg/tensorflow,hfp/tensorflow-xsmm,strint/tensorflow,Xeralux/tensorflow,manjunaths/tensorflow,ZhangXinNan/tensorflow,Bulochkin/tensorflow_pack,elingg/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sjperkins/tensorflow,Intel-Corporation/tensorflow,scenarios/tensorflow,guschmue/tensorflow,kevin-coder/tensorflow-fork,JVillella/tensorflow,paolodedios/tensorflow,jhseu/tensorflow,naturali/tensorflow,dendisuhubdy/tensorflow,TakayukiSakai/tensorflow,abhitopia/tensorflow,gnieboer/tensorflow,raymondxyang/tensorflow,drpngx/tensorflow,alivecor/tensorflow,haeusser/tensorflow,raymondxyang/tensorflow,jostep/tensorflow,hfp/tensorflow-xsmm,sarvex/tensorflow,petewarden/tensorflow_makefile,caisq/tensorflow,haeusser/tensorflow,MoamerEncsConcordiaCa/tensorflow,apark263/tensorflow,thesuperzapper/tensorflow,eerwitt/tensorflow,frreiss/tensorflow-fred,xzturn/tensorflow,ishay2b/tensorflow,ppries/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,abhitopia/tensorflow,alsrgv/tensorflow,meteorcloudy/tensorflow,torna
dozou/tensorflow,lakshayg/tensorflow,pcm17/tensorflow,maciekcc/tensorflow,mrry/tensorflow,dancingdan/tensorflow,sarvex/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,tensorflow/skflow,aldian/tensorflow,gojira/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,code-sauce/tensorflow,AnishShah/tensorflow,davidzchen/tensorflow,nanditav/15712-TensorFlow,elingg/tensorflow,eaplatanios/tensorflow,eadgarchen/tensorflow,renyi533/tensorflow,horance-liu/tensorflow,frreiss/tensorflow-fred,HaebinShin/tensorflow,panmari/tensorflow,chris-chris/tensorflow,bowang/tensorflow,cancan101/tensorflow,dyoung418/tensorflow,tongwang01/tensorflow,markslwong/tensorflow,zasdfgbnm/tensorflow,panmari/tensorflow,gautam1858/tensorflow,ychfan/tensorflow,brchiu/tensorflow,codrut3/tensorflow,codrut3/tensorflow,annarev/tensorflow,cxxgtxy/tensorflow,manazhao/tf_recsys,xodus7/tensorflow,kchodorow/tensorflow,petewarden/tensorflow,jbedorf/tensorflow,cancan101/tensorflow,nightjean/Deep-Learning,aselle/tensorflow,abhitopia/tensorflow,petewarden/tensorflow_makefile,nikste/tensorflow,ychfan/tensorflow,elingg/tensorflow,dongjoon-hyun/tensorflow,AndreasMadsen/tensorflow,calebfoss/tensorflow,pierreg/tensorflow,chemelnucfin/tensorflow,brchiu/tensorflow,adamtiger/tensorflow,lakshayg/tensorflow,alisidd/tensorflow,admcrae/tensorflow,cg31/tensorflow,arborh/tensorflow,wchan/tensorflow,Kongsea/tensorflow,Bulochkin/tensorflow_pack,Intel-tensorflow/tensorflow,adamtiger/tensorflow,yufengg/tensorflow,lukeiwanski/tensorflow-opencl,manipopopo/tensorflow,eaplatanios/tensorflow,neilhan/tensorflow,lukeiwanski/tensorflow,jhseu/tensorflow,dansbecker/skflow,snnn/tensorflow,nolanliou/tensorflow,chemelnucfin/tensorflow,naturali/tensorflow,alshedivat/tensorflow,adit-chandra/tensorflow,dancingdan/tensorflow,pavelchristof/gomoku-ai,xzturn/tensorflow,anand-c-goog/tensorflow,with-git/tensorflow,bowang/tensorflow,ArtsiomCh/tensorflow,jendap/tensorflow,panmari/tensorflow,thjashin/tensorflow,chris-chris/tensorflow,neilhan/tensorflow,a
-doumoulakis/tensorflow,alisidd/tensorflow,paolodedios/tensorflow,petewarden/tensorflow,AndreasMadsen/tensorflow,tiagofrepereira2012/tensorflow,frreiss/tensorflow-fred,dhalleine/tensorflow,peterbraden/tensorflow,alistairlow/tensorflow,jostep/tensorflow,ran5515/DeepDecision,Intel-Corporation/tensorflow,vrv/tensorflow,kevin-coder/tensorflow-fork,awni/tensorflow,eadgarchen/tensorflow,eerwitt/tensorflow,Bulochkin/tensorflow_pack,jeffzheng1/tensorflow,jalexvig/tensorflow,dhalleine/tensorflow,martinwicke/tensorflow,benoitsteiner/tensorflow,Intel-tensorflow/tensorflow,ivano666/tensorflow,HaebinShin/tensorflow,code-sauce/tensorflow,seanli9jan/tensorflow,zasdfgbnm/tensorflow,annarev/tensorflow,hsaputra/tensorflow,ghchinoy/tensorflow,gnieboer/tensorflow,whn09/tensorflow,markslwong/tensorflow,yaroslavvb/tensorflow,xodus7/tensorflow,jwlawson/tensorflow,kobejean/tensorflow,dhalleine/tensorflow,ppries/tensorflow,elingg/tensorflow,alshedivat/tensorflow,mavenlin/tensorflow,johndpope/tensorflow,elingg/tensorflow,zasdfgbnm/tensorflow,MycChiu/tensorflow,codrut3/tensorflow,martinwicke/tensorflow,ninotoshi/tensorflow,johndpope/tensorflow,guschmue/tensorflow,rabipanda/tensorflow,martinwicke/tensorflow,martinwicke/tensorflow,AnishShah/tensorflow,ppries/tensorflow,MycChiu/tensorflow,jeffzheng1/tensorflow,scenarios/tensorflow,XueqingLin/tensorflow,dancingdan/tensorflow,yanchen036/tensorflow,memo/tensorflow,ArtsiomCh/tensorflow,tomasreimers/tensorflow-emscripten,gibiansky/tensorflow,lukeiwanski/tensorflow,a-doumoulakis/tensorflow,yongtang/tensorflow,tiagofrepereira2012/tensorflow,gnieboer/tensorflow,gautam1858/tensorflow,Carmezim/tensorflow,chris-chris/tensorflow,taknevski/tensorflow-xsmm,lukeiwanski/tensorflow-opencl,asimshankar/tensorflow,ageron/tensorflow,MycChiu/tensorflow,zasdfgbnm/tensorflow,wchan/tensorflow,ravindrapanda/tensorflow,gojira/tensorflow,ppwwyyxx/tensorflow,laosiaudi/tensorflow,RapidApplicationDevelopment/tensorflow,martinwicke/tensorflow,aselle/tensorflow,manipopopo/tenso
rflow,kchodorow/tensorflow,av8ramit/tensorflow,vrv/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,hsaputra/tensorflow,seanli9jan/tensorflow,zasdfgbnm/tensorflow,alistairlow/tensorflow,sandeepdsouza93/TensorFlow-15712,nikste/tensorflow,pcm17/tensorflow,HKUST-SING/tensorflow,laosiaudi/tensorflow,asimshankar/tensorflow,meteorcloudy/tensorflow,HKUST-SING/tensorflow,yongtang/tensorflow,andrewcmyers/tensorflow,sandeepdsouza93/TensorFlow-15712,adit-chandra/tensorflow,ychfan/tensorflow,renyi533/tensorflow,mavenlin/tensorflow,thesuperzapper/tensorflow,ppwwyyxx/tensorflow,rabipanda/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,admcrae/tensorflow,zasdfgbnm/tensorflow,benoitsteiner/tensorflow-xsmm,dancingdan/tensorflow,apark263/tensorflow,ravindrapanda/tensorflow,jhseu/tensorflow,with-git/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,admcrae/tensorflow,LUTAN/tensorflow,ibab/tensorflow,MycChiu/tensorflow,panmari/tensorflow,renyi533/tensorflow,ravindrapanda/tensorflow,thesuperzapper/tensorflow,sandeepdsouza93/TensorFlow-15712,caisq/tensorflow,caisq/tensorflow,caisq/tensorflow,frreiss/tensorflow-fred,SnakeJenny/TensorFlow,jalexvig/tensorflow,odejesush/tensorflow,frreiss/tensorflow-fred,apark263/tensorflow,jhseu/tensorflow,tntnatbry/tensorflow,odejesush/tensorflow,johndpope/tensorflow,adit-chandra/tensorflow,apark263/tensorflow,thjashin/tensorflow,seaotterman/tensorflow,DCSaunders/tensorflow,yufengg/tensorflow,LUTAN/tensorflow,thjashin/tensorflow,guschmue/tensorflow,llhe/tensorflow,renyi533/tensorflow,MoamerEncsConcordiaCa/tensorflow,LUTAN/tensorflow,jwlawson/tensorflow,Intel-tensorflow/tensorflow,jwlawson/tensorflow,dendisuhubdy/tensorflow,manjunaths/tensorflow,bowang/tensorflow,alisidd/tensorflow,mortada/tensorflow,hsaputra/tensorflow,dendisuhubdy/tensorflow,ageron/tensorflow,Intel-tensorflow/tensorflow,Bulochkin/tensorflow_pack,HKUST-SING/tensorflow,je
ffzheng1/tensorflow,Xeralux/tensorflow,xzturn/tensorflow,Mazecreator/tensorflow,EvenStrangest/tensorflow,mortada/tensorflow,jhaux/tensorflow,sandeepdsouza93/TensorFlow-15712,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,dhalleine/tensorflow,LUTAN/tensorflow,andrewcmyers/tensorflow,scenarios/tensorflow,drpngx/tensorflow,jart/tensorflow,hfp/tensorflow-xsmm,ppwwyyxx/tensorflow,tongwang01/tensorflow,gojira/tensorflow,alshedivat/tensorflow,arborh/tensorflow,eerwitt/tensorflow,haeusser/tensorflow,Mistobaan/tensorflow,Mazecreator/tensorflow,dongjoon-hyun/tensorflow,sjperkins/tensorflow,DavidNorman/tensorflow,dongjoon-hyun/tensorflow,theflofly/tensorflow,aam-at/tensorflow,petewarden/tensorflow_makefile,chenjun0210/tensorflow,sandeepgupta2k4/tensorflow,mengxn/tensorflow,tntnatbry/tensorflow,LUTAN/tensorflow,benoitsteiner/tensorflow-xsmm,maciekcc/tensorflow,raymondxyang/tensorflow,alsrgv/tensorflow,llhe/tensorflow,yufengg/tensorflow,adit-chandra/tensorflow,HKUST-SING/tensorflow,sandeepdsouza93/TensorFlow-15712,gautam1858/tensorflow,dongjoon-hyun/tensorflow,sarvex/tensorflow,Carmezim/tensorflow,asadziach/tensorflow,pcm17/tensorflow,nburn42/tensorflow,laszlocsomor/tensorflow,girving/tensorflow,XueqingLin/tensorflow,tornadozou/tensorflow,alsrgv/tensorflow,jbedorf/tensorflow,snnn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,allenlavoie/tensorflow,Bulochkin/tensorflow_pack,apark263/tensorflow,cancan101/tensorflow,hsaputra/tensorflow,chemelnucfin/tensorflow,xodus7/tensorflow,tensorflow/tensorflow,odejesush/tensorflow,kobejean/tensorflow,arborh/tensorflow,dhalleine/tensorflow,hfp/tensorflow-xsmm,gautam1858/tensorflow,johndpope/tensorflow,jhseu/tensorflow,ageron/tensorflow,eadgarchen/tensorflow,girving/tensorflow,rabipanda/tensorflow,handroissuazo/tensorflow,allenlavoie/tensorflow,JVillella/tensorflow,mixturemodel-flow/tensorflow,dongjoon-hyun/tensorflow,gnieboer/tensorflow,jendap/tensorflow,nburn42/tensorflow,gibiansky/tensorflow,alshediv
at/tensorflow,arborh/tensorflow,tillahoffmann/tensorflow,sjperkins/tensorflow,XueqingLin/tensorflow,pierreg/tensorflow,bowang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,MostafaGazar/tensorflow,alivecor/tensorflow,rabipanda/tensorflow,benoitsteiner/tensorflow-opencl,zasdfgbnm/tensorflow,mortada/tensorflow,mrry/tensorflow,alheinecke/tensorflow-xsmm,tensorflow/tensorflow-experimental_link_static_libraries_once,alheinecke/tensorflow-xsmm,jwlawson/tensorflow,krikru/tensorflow-opencl,seaotterman/tensorflow,codrut3/tensorflow,RapidApplicationDevelopment/tensorflow,aam-at/tensorflow,Moriadry/tensorflow,TakayukiSakai/tensorflow,eadgarchen/tensorflow,ville-k/tensorflow,tensorflow/skflow,MoamerEncsConcordiaCa/tensorflow,tensorflow/tensorflow-pywrap_saved_model,peterbraden/tensorflow,ninotoshi/tensorflow,llhe/tensorflow,tensorflow/tensorflow-pywrap_saved_model,aldian/tensorflow,Mistobaan/tensorflow,hfp/tensorflow-xsmm,aam-at/tensorflow,wangyum/tensorflow,Carmezim/tensorflow,ishay2b/tensorflow,allenlavoie/tensorflow,AnishShah/tensorflow,juharris/tensorflow,memo/tensorflow,gunan/tensorflow,HaebinShin/tensorflow,a-doumoulakis/tensorflow,allenlavoie/tensorflow,rdipietro/tensorflow,mdrumond/tensorflow,rabipanda/tensorflow,gautam1858/tensorflow,alheinecke/tensorflow-xsmm,Mistobaan/tensorflow,jendap/tensorflow,benoitsteiner/tensorflow-opencl,Bismarrck/tensorflow,lukeiwanski/tensorflow-opencl,a-doumoulakis/tensorflow,dyoung418/tensorflow,dyoung418/tensorflow,tillahoffmann/tensorflow,panmari/tensorflow,laosiaudi/tensorflow,ibmsoe/tensorflow,alsrgv/tensorflow,dendisuhubdy/tensorflow,XueqingLin/tensorflow,ibmsoe/tensorflow,ychfan/tensorflow,benoitsteiner/tensorflow,llhe/tensorflow,petewarden/tensorflow,karllessard/tensorflow,pavelchristof/gomoku-ai,apark263/tensorflow,gibiansky/tensorflow,sandeepgupta2k4/tensorflow,llhe/tensorflow,snnn/tensorflow,suiyuan2009/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Carmezi
m/tensorflow,markslwong/tensorflow,sjperkins/tensorflow,benoitsteiner/tensorflow-opencl,odejesush/tensorflow,hsaputra/tensorflow,MostafaGazar/tensorflow,suiyuan2009/tensorflow,aam-at/tensorflow,memo/tensorflow,Bismarrck/tensorflow,girving/tensorflow,gunan/tensorflow,nanditav/15712-TensorFlow,jeffzheng1/tensorflow,tensorflow/tensorflow,sandeepdsouza93/TensorFlow-15712,Mistobaan/tensorflow,DavidNorman/tensorflow,MoamerEncsConcordiaCa/tensorflow,jhaux/tensorflow,kamcpp/tensorflow,cancan101/tensorflow,jhaux/tensorflow,seanli9jan/tensorflow,ville-k/tensorflow,memo/tensorflow,llhe/tensorflow,pcm17/tensorflow,admcrae/tensorflow,snnn/tensorflow,tongwang01/tensorflow,ageron/tensorflow,asadziach/tensorflow,benoitsteiner/tensorflow,frreiss/tensorflow-fred,adit-chandra/tensorflow,asimshankar/tensorflow,AnishShah/tensorflow,manjunaths/tensorflow,Xeralux/tensorflow,yongtang/tensorflow,arborh/tensorflow,tiagofrepereira2012/tensorflow,meteorcloudy/tensorflow,abhitopia/tensorflow,cg31/tensorflow,ghchinoy/tensorflow,handroissuazo/tensorflow,vrv/tensorflow,mdrumond/tensorflow,nburn42/tensorflow,yongtang/tensorflow,eerwitt/tensorflow,frreiss/tensorflow-fred,markslwong/tensorflow,adit-chandra/tensorflow,andrewcmyers/tensorflow,calebfoss/tensorflow,theflofly/tensorflow,dongjoon-hyun/tensorflow,gautam1858/tensorflow,eadgarchen/tensorflow,aselle/tensorflow,jwlawson/tensorflow,ibab/tensorflow,mixturemodel-flow/tensorflow,SnakeJenny/TensorFlow,nanditav/15712-TensorFlow,mengxn/tensorflow,DCSaunders/tensorflow,nolanliou/tensorflow,caisq/tensorflow,yongtang/tensorflow,manazhao/tf_recsys,paolodedios/tensorflow,a-doumoulakis/tensorflow,krikru/tensorflow-opencl,with-git/tensorflow,vrv/tensorflow,Mazecreator/tensorflow,jart/tensorflow,alsrgv/tensorflow,seaotterman/tensorflow,andrewcmyers/tensorflow,tongwang01/tensorflow,DCSaunders/tensorflow,laosiaudi/tensorflow,whn09/tensorflow,DCSaunders/tensorflow,xzturn/tensorflow,EvenStrangest/tensorflow,handroissuazo/tensorflow,eadgarchen/tensorflow,Carmezim/
tensorflow,girving/tensorflow,ville-k/tensorflow,kobejean/tensorflow,naturali/tensorflow,neilhan/tensorflow,ageron/tensorflow,mengxn/tensorflow,chemelnucfin/tensorflow,tornadozou/tensorflow,Mazecreator/tensorflow,ZhangXinNan/tensorflow,nightjean/Deep-Learning,abhitopia/tensorflow,ishay2b/tensorflow,abhitopia/tensorflow,ishay2b/tensorflow,chemelnucfin/tensorflow,dongjoon-hyun/tensorflow,wangyum/tensorflow,caisq/tensorflow,jhseu/tensorflow,manipopopo/tensorflow,kevin-coder/tensorflow-fork,benoitsteiner/tensorflow,alsrgv/tensorflow,suiyuan2009/tensorflow,alistairlow/tensorflow,yaroslavvb/tensorflow,nolanliou/tensorflow,suiyuan2009/tensorflow,kobejean/tensorflow,kobejean/tensorflow,unsiloai/syntaxnet-ops-hack,dhalleine/tensorflow,ravindrapanda/tensorflow,caisq/tensorflow,whn09/tensorflow,gnieboer/tensorflow,yongtang/tensorflow,petewarden/tensorflow_makefile,llhe/tensorflow,ishay2b/tensorflow,manjunaths/tensorflow,horance-liu/tensorflow,asadziach/tensorflow,freedomtan/tensorflow,tillahoffmann/tensorflow,gunan/tensorflow,with-git/tensorflow,mengxn/tensorflow,Moriadry/tensorflow,dyoung418/tensorflow,sjperkins/tensorflow,ArtsiomCh/tensorflow,thjashin/tensorflow,rabipanda/tensorflow,annarev/tensorflow,ibab/tensorflow,DCSaunders/tensorflow,awni/tensorflow,manipopopo/tensorflow,Xeralux/tensorflow,xodus7/tensorflow,chenjun0210/tensorflow,eaplatanios/tensorflow,jalexvig/tensorflow,chenjun0210/tensorflow,abhitopia/tensorflow,maciekcc/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,renyi533/tensorflow,jwlawson/tensorflow,ZhangXinNan/tensorflow,mavenlin/tensorflow,taknevski/tensorflow-xsmm,SnakeJenny/TensorFlow,RapidApplicationDevelopment/tensorflow,freedomtan/tensorflow,alistairlow/tensorflow,hehongliang/tensorflow,ibab/tensorflow,adamtiger/tensorflow,gojira/tensorflow,ishay2b/tensorflow,peterbraden/tensorflow,rdipietro/tensorflow,ville-k/tensorflow,Mazecreator/tensorflow,alistairlow/tensorflow,krikru/tensorflow-opencl,petewarden/tensorflow_makefile,handroissuazo/tensorflow,t
hesuperzapper/tensorflow,memo/tensorflow,aldian/tensorflow,nburn42/tensorflow,naturali/tensorflow,cxxgtxy/tensorflow,gibiansky/tensorflow,nanditav/15712-TensorFlow,manipopopo/tensorflow,vrv/tensorflow,lukeiwanski/tensorflow-opencl,mdrumond/tensorflow,memo/tensorflow,jart/tensorflow,jhseu/tensorflow,admcrae/tensorflow,eaplatanios/tensorflow,gojira/tensorflow,ageron/tensorflow,yufengg/tensorflow,ageron/tensorflow,pierreg/tensorflow,drpngx/tensorflow,jbedorf/tensorflow,wangyum/tensorflow,MoamerEncsConcordiaCa/tensorflow,nikste/tensorflow,jostep/tensorflow,thesuperzapper/tensorflow,mengxn/tensorflow,gunan/tensorflow,allenlavoie/tensorflow,karllessard/tensorflow,yufengg/tensorflow,mengxn/tensorflow,raymondxyang/tensorflow,freedomtan/tensorflow,tongwang01/tensorflow,lukeiwanski/tensorflow,manipopopo/tensorflow,johndpope/tensorflow,chenjun0210/tensorflow,RapidApplicationDevelopment/tensorflow,av8ramit/tensorflow,yufengg/tensorflow,ravindrapanda/tensorflow,Bismarrck/tensorflow,gnieboer/tensorflow,alheinecke/tensorflow-xsmm,mrry/tensorflow,tomasreimers/tensorflow-emscripten,alistairlow/tensorflow,brchiu/tensorflow,a-doumoulakis/tensorflow,code-sauce/tensorflow,freedomtan/tensorflow,MycChiu/tensorflow,Moriadry/tensorflow,Carmezim/tensorflow,chris-chris/tensorflow,xzturn/tensorflow,anand-c-goog/tensorflow,brchiu/tensorflow,laszlocsomor/tensorflow,tntnatbry/tensorflow,alheinecke/tensorflow-xsmm,kchodorow/tensorflow,rdipietro/tensorflow,kchodorow/tensorflow,arborh/tensorflow,wangyum/tensorflow,mortada/tensorflow,nikste/tensorflow,MostafaGazar/tensorflow,AndreasMadsen/tensorflow,awni/tensorflow,jart/tensorflow,pavelchristof/gomoku-ai,tiagofrepereira2012/tensorflow,benoitsteiner/tensorflow,snnn/tensorflow,XueqingLin/tensorflow,jendap/tensorflow,cg31/tensorflow,thesuperzapper/tensorflow,unsiloai/syntaxnet-ops-hack,gojira/tensorflow,SnakeJenny/TensorFlow,manipopopo/tensorflow,JVillella/tensorflow,jendap/tensorflow,benoitsteiner/tensorflow,raymondxyang/tensorflow,martinwicke/tensorfl
ow,jbedorf/tensorflow,zycdragonball/tensorflow,eerwitt/tensorflow,sjperkins/tensorflow,haeusser/tensorflow,kevin-coder/tensorflow-fork,aldian/tensorflow,awni/tensorflow,cancan101/tensorflow,freedomtan/tensorflow,ghchinoy/tensorflow,alivecor/tensorflow,girving/tensorflow,seaotterman/tensorflow,horance-liu/tensorflow,horance-liu/tensorflow,guschmue/tensorflow,Bismarrck/tensorflow,zasdfgbnm/tensorflow,cancan101/tensorflow,naturali/tensorflow,JingJunYin/tensorflow,AnishShah/tensorflow,tiagofrepereira2012/tensorflow,ninotoshi/tensorflow,AnishShah/tensorflow,laszlocsomor/tensorflow,asadziach/tensorflow,JingJunYin/tensorflow,ppries/tensorflow,rabipanda/tensorflow,raymondxyang/tensorflow,mortada/tensorflow,zycdragonball/tensorflow,cg31/tensorflow,odejesush/tensorflow,freedomtan/tensorflow,maciekcc/tensorflow,markslwong/tensorflow,peterbraden/tensorflow,frreiss/tensorflow-fred,SnakeJenny/TensorFlow,mrry/tensorflow,ghchinoy/tensorflow,hfp/tensorflow-xsmm,neilhan/tensorflow,Mazecreator/tensorflow,ran5515/DeepDecision,davidzchen/tensorflow,yongtang/tensorflow,meteorcloudy/tensorflow,jeffzheng1/tensorflow,tntnatbry/tensorflow,XueqingLin/tensorflow,annarev/tensorflow,thesuperzapper/tensorflow,freedomtan/tensorflow,karllessard/tensorflow,caisq/tensorflow,vrv/tensorflow,xodus7/tensorflow,adit-chandra/tensorflow,ZhangXinNan/tensorflow,tomasreimers/tensorflow-emscripten,tensorflow/tensorflow,vrv/tensorflow,tomasreimers/tensorflow-emscripten,unsiloai/syntaxnet-ops-hack,AndreasMadsen/tensorflow,raymondxyang/tensorflow,MycChiu/tensorflow,xodus7/tensorflow,jbedorf/tensorflow,eaplatanios/tensorflow,sarvex/tensorflow,seanli9jan/tensorflow,johndpope/tensorflow,av8ramit/tensorflow,hsaputra/tensorflow,meteorcloudy/tensorflow,calebfoss/tensorflow,annarev/tensorflow,snnn/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow,EvenStrangest/tensorflow,DavidNorman/tensorflow,codrut3/tensorflow,benoitsteiner/tensorflow-xsmm,nolanliou/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/te
nsorflow,ychfan/tensorflow,JingJunYin/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhseu/tensorflow,dancingdan/tensorflow,unsiloai/syntaxnet-ops-hack,gunan/tensorflow,dendisuhubdy/tensorflow,jbedorf/tensorflow,arborh/tensorflow,scenarios/tensorflow,maciekcc/tensorflow,mavenlin/tensorflow,jart/tensorflow,kchodorow/tensorflow,mdrumond/tensorflow,rabipanda/tensorflow,LUTAN/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,gojira/tensorflow,yaroslavvb/tensorflow,a-doumoulakis/tensorflow,ran5515/DeepDecision,Xeralux/tensorflow,jendap/tensorflow,MycChiu/tensorflow,chenjun0210/tensorflow,laosiaudi/tensorflow,tornadozou/tensorflow,panmari/tensorflow,gojira/tensorflow,mrry/tensorflow,gunan/tensorflow,llhe/tensorflow,odejesush/tensorflow,Intel-Corporation/tensorflow,RapidApplicationDevelopment/tensorflow,suiyuan2009/tensorflow,ville-k/tensorflow,strint/tensorflow,handroissuazo/tensorflow,hsaputra/tensorflow,manjunaths/tensorflow,MoamerEncsConcordiaCa/tensorflow,cxxgtxy/tensorflow,gibiansky/tensorflow,markslwong/tensorflow,SnakeJenny/TensorFlow,HaebinShin/tensorflow,mdrumond/tensorflow,yanchen036/tensorflow,odejesush/tensorflow,alshedivat/tensorflow,benoitsteiner/tensorflow-opencl,alshedivat/tensorflow,ghchinoy/tensorflow,AnishShah/tensorflow,xodus7/tensorflow,aam-at/tensorflow,wangyum/tensorflow,martinwicke/tensorflow,ishay2b/tensorflow,pcm17/tensorflow,hsaputra/tensorflow,hsaputra/tensorflow,ppries/tensorflow,girving/tensorflow,allenlavoie/tensorflow,hfp/tensorflow-xsmm,pavelchristof/gomoku-ai,jalexvig/tensorflow,karllessard/tensorflow,manipopopo/tensorflow,rdipietro/tensorflow,andrewcmyers/tensorflow,hsaputra/tensorflow,ZhangXinNan/tensorflow,jeffzheng1/tensorflow,asadziach/tensorflow,petewarden/tensorflow,kchodorow/tensorflow,rabipanda/tensorflow,davidzchen/tensorflow,bowang/tensorflow,manazhao/tf_recsys,TakayukiSakai/tensorflow,wangyum/tensorflow,mengxn/tensorflow,av8ramit/tensorflow,benoitsteiner/
tensorflow,asimshankar/tensorflow,ivano666/tensorflow,asadziach/tensorflow,maciekcc/tensorflow,karllessard/tensorflow,dendisuhubdy/tensorflow,jeffzheng1/tensorflow,guschmue/tensorflow,alshedivat/tensorflow,pierreg/tensorflow,caisq/tensorflow,theflofly/tensorflow,petewarden/tensorflow,freedomtan/tensorflow,ageron/tensorflow,taknevski/tensorflow-xsmm,jart/tensorflow,chris-chris/tensorflow,Intel-Corporation/tensorflow,whn09/tensorflow,unsiloai/syntaxnet-ops-hack,petewarden/tensorflow,rdipietro/tensorflow,lakshayg/tensorflow,ibab/tensorflow,renyi533/tensorflow,nburn42/tensorflow,gibiansky/tensorflow,theflofly/tensorflow,laosiaudi/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,ran5515/DeepDecision,chemelnucfin/tensorflow,gunan/tensorflow,drpngx/tensorflow,mixturemodel-flow/tensorflow,nightjean/Deep-Learning,tomasreimers/tensorflow-emscripten,tillahoffmann/tensorflow,code-sauce/tensorflow,naturali/tensorflow,code-sauce/tensorflow,mortada/tensorflow,RapidApplicationDevelopment/tensorflow,JVillella/tensorflow,Bulochkin/tensorflow_pack,sjperkins/tensorflow,pcm17/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,benoitsteiner/tensorflow-opencl,codrut3/tensorflow,tiagofrepereira2012/tensorflow,Mazecreator/tensorflow,anilmuthineni/tensorflow,kchodorow/tensorflow,raymondxyang/tensorflow,kamcpp/tensorflow,alshedivat/tensorflow,Carmezim/tensorflow,Xeralux/tensorflow,Intel-tensorflow/tensorflow,tomasreimers/tensorflow-emscripten,jostep/tensorflow,wchan/tensorflow,frreiss/tensorflow-fred,handroissuazo/tensorflow,jendap/tensorflow,sandeepdsouza93/TensorFlow-15712,nanditav/15712-TensorFlow,memo/tensorflow,jalexvig/tensorflow,manjunaths/tensorflow,annarev/tensorflow,sandeepgupta2k4/tensorflow,Moriadry/tensorflow,llhe/tensorflow,ageron/tensorflow,JingJunYin/tensorflow,HaebinShin/tensorflow,drpngx/tensorflow,MostafaGazar/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Dav
idNorman/tensorflow,sarvex/tensorflow,kobejean/tensorflow,ageron/tensorflow,ppries/tensorflow,manazhao/tf_recsys,eadgarchen/tensorflow,davidzchen/tensorflow,nburn42/tensorflow,whn09/tensorflow,girving/tensorflow,gnieboer/tensorflow,kevin-coder/tensorflow-fork,johndpope/tensorflow,dendisuhubdy/tensorflow,JingJunYin/tensorflow,adit-chandra/tensorflow,petewarden/tensorflow_makefile,gunan/tensorflow,renyi533/tensorflow,juharris/tensorflow,panmari/tensorflow,allenlavoie/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,davidzchen/tensorflow,alshedivat/tensorflow,asadziach/tensorflow,Bismarrck/tensorflow,petewarden/tensorflow_makefile,paolodedios/tensorflow,XueqingLin/tensorflow,drpngx/tensorflow,lukeiwanski/tensorflow-opencl,alisidd/tensorflow,paolodedios/tensorflow,adit-chandra/tensorflow,rdipietro/tensorflow,gibiansky/tensorflow,yanchen036/tensorflow,mortada/tensorflow,petewarden/tensorflow,dyoung418/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow,jalexvig/tensorflow,pcm17/tensorflow,llhe/tensorflow,davidzchen/tensorflow,seanli9jan/tensorflow,naturali/tensorflow,nolanliou/tensorflow,johndpope/tensorflow,nikste/tensorflow,EvenStrangest/tensorflow,renyi533/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow,code-sauce/tensorflow,alsrgv/tensorflow,mrry/tensorflow,strint/tensorflow,jbedorf/tensorflow,jendap/tensorflow,ibab/tensorflow,Kongsea/tensorflow,wchan/tensorflow,manazhao/tf_recsys,laosiaudi/tensorflow,anand-c-goog/tensorflow,tillahoffmann/tensorflow,Bismarrck/tensorflow,scenarios/tensorflow,SnakeJenny/TensorFlow,snnn/tensorflow,mixturemodel-flow/tensorflow,annarev/tensorflow,Intel-tensorflow/tensorflow,eerwitt/tensorflow,meteorcloudy/tensorflow,Carmezim/tensorflow,laszlocsomor/tensorflow,alivecor/tensorflow,DCSaunders/tensorflow,gojira/tensorflow,renyi533/tensorflow,whn09/tensorflow,Carmezim/tensorflow,snnn/tensorflow,lakshayg/tensorflow,eadgarchen/tensorflow,EvenStrangest/tensorflow,eerwitt/tensorflow,tensorflow/tensorflow-pywrap_sa
ved_model,petewarden/tensorflow,MostafaGazar/tensorflow,dyoung418/tensorflow,yaroslavvb/tensorflow,tntnatbry/tensorflow,renyi533/tensorflow,horance-liu/tensorflow,Kongsea/tensorflow,ibmsoe/tensorflow,dhalleine/tensorflow,ivano666/tensorflow,juharris/tensorflow,horance-liu/tensorflow,EvenStrangest/tensorflow,memo/tensorflow,whn09/tensorflow,handroissuazo/tensorflow,aam-at/tensorflow,Kongsea/tensorflow,HaebinShin/tensorflow | ---
+++
@@ -15,7 +15,7 @@
import random
from sklearn import datasets
-from sklearn.metrics import accuracy_score, mean_squared_error, log_loss
+from sklearn.metrics import accuracy_score
import tensorflow as tf
from tensorflow.python.platform import googletest
@@ -34,6 +34,8 @@
classifier.fit(data, labels)
score = accuracy_score(classifier.predict(data), labels)
self.assertGreater(score, 0.5, "Failed with score = {0}".format(score))
+ else:
+ print("No pandas installed. test_pandas_data_classification skipped.")
if __name__ == '__main__':
tf.test.main() |
411581b5b773daceee9d3e2d7751ca652d251c00 | aiosmtplib/__init__.py | aiosmtplib/__init__.py | """
aiosmtplib
==========
An asyncio SMTP client.
Roughly based (with API differences) on smtplib from the Python 3 standard
library by: The Dragon De Monsyne <dragondm@integral.org>
Author: Cole Maclean <hi@cole.io>
"""
__title__ = 'aiosmtplib'
__version__ = '0.1.7'
__author__ = 'Cole Maclean'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Cole Maclean'
from .errors import * # NOQA
from .response import * # NOQA
from .smtp import * # NOQA
from .status import * # NOQA
__all__ = (
errors.__all__ + response.__all__ + smtp.__all__ + status.__all__ # NOQA
)
| """
aiosmtplib
==========
An asyncio SMTP client.
Roughly based (with API differences) on smtplib from the Python 3 standard
library by: The Dragon De Monsyne <dragondm@integral.org>
Author: Cole Maclean <hi@cole.io>
"""
__title__ = 'aiosmtplib'
__version__ = '1.0.0'
__author__ = 'Cole Maclean'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Cole Maclean'
from .errors import * # NOQA
from .response import * # NOQA
from .smtp import * # NOQA
from .status import * # NOQA
__all__ = (
errors.__all__ + response.__all__ + smtp.__all__ + status.__all__ # NOQA
)
| Move to 1.0.0; let's all use semver | Move to 1.0.0; let's all use semver
| Python | mit | cole/aiosmtplib | ---
+++
@@ -11,7 +11,7 @@
"""
__title__ = 'aiosmtplib'
-__version__ = '0.1.7'
+__version__ = '1.0.0'
__author__ = 'Cole Maclean'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Cole Maclean' |
f1cdc9d6d4736202480045aa633fb0ac5992e7e3 | test/views.py | test/views.py | from django.core.urlresolvers import reverse_lazy, reverse
from django import forms
from .models import Author
from popupcrud.views import PopupCrudViewSet
# Create your views here.
class AuthorForm(forms.ModelForm):
sex = forms.ChoiceField(label="Sex", choices=(('M', 'Male'), ('F', 'Female')))
class Meta:
model = Author
fields = ('name', 'age')
class AuthorCrudViewset(PopupCrudViewSet):
model = Author
fields = ('name', 'age')
list_display = ('name', 'age', 'half_age', 'double_age')
list_url = reverse_lazy("authors")
new_url = reverse_lazy("new-author")
"""
form_class = AuthorForm
list_permission_required = ('tests.add_author',)
create_permission_required = ('tests.add_author',)
update_permission_required = ('tests.change_author',)
delete_permission_required = ('tests.delete_author',)
"""
def half_age(self, author):
return author.age/2
half_age.label = "Half Age"
def get_edit_url(self, obj):
return reverse("edit-author", kwargs={'pk': obj.pk})
def get_delete_url(self, obj):
return reverse("delete-author", kwargs={'pk': obj.pk})
| from django.core.urlresolvers import reverse_lazy, reverse
from django import forms
from .models import Author
from popupcrud.views import PopupCrudViewSet
# Create your views here.
class AuthorForm(forms.ModelForm):
sex = forms.ChoiceField(label="Sex", choices=(('M', 'Male'), ('F', 'Female')))
class Meta:
model = Author
fields = ('name', 'age')
class AuthorCrudViewset(PopupCrudViewSet):
model = Author
fields = ('name', 'age')
list_display = ('name', 'age', 'half_age', 'double_age')
list_url = reverse_lazy("authors")
new_url = reverse_lazy("new-author")
"""
form_class = AuthorForm
list_permission_required = ('tests.add_author',)
create_permission_required = ('tests.add_author',)
update_permission_required = ('tests.change_author',)
delete_permission_required = ('tests.delete_author',)
"""
def half_age(self, author):
return int(author.age/2)
half_age.label = "Half Age"
def get_edit_url(self, obj):
return reverse("edit-author", kwargs={'pk': obj.pk})
def get_delete_url(self, obj):
return reverse("delete-author", kwargs={'pk': obj.pk})
| Handle PY3 default data conversion vagaries in unit test | Handle PY3 default data conversion vagaries in unit test
| Python | bsd-3-clause | harikvpy/django-popupcrud,harikvpy/django-popupcrud,harikvpy/django-popupcrud | ---
+++
@@ -29,7 +29,7 @@
"""
def half_age(self, author):
- return author.age/2
+ return int(author.age/2)
half_age.label = "Half Age"
def get_edit_url(self, obj): |
e28c0bd84dc8814654850b607afbdaeb669956c8 | tests/data.py | tests/data.py | # -*- coding: utf-8 -*-
import json as _json
from collections import OrderedDict as _OrderedDict
import os as _os
_thisdir = _os.path.dirname(__file__)
class MenuData(object):
_data_files = [
'data.json',
]
def __init__(self, *args, **kwargs):
dfiles = [_os.path.join(_thisdir, x) for x in self._data_files]
for fpath in dfiles:
with open(fpath, 'rb') as fdata:
dat = _json.load(fdata)
for k,v in dat.items():
setattr(self, k, v)
self.people = _OrderedDict(self.people)
| # -*- coding: utf-8 -*-
import json as _json
from collections import OrderedDict as _OrderedDict
import os as _os
_thisdir = _os.path.dirname(__file__)
class MenuData(object):
_data_files = [
'data.json',
]
def __init__(self, *args, **kwargs):
dfiles = [_os.path.join(_thisdir, x) for x in self._data_files]
for fpath in dfiles:
with open(fpath, 'rt') as fdata:
dat = _json.load(fdata)
for k,v in dat.items():
setattr(self, k, v)
self.people = _OrderedDict(self.people)
| Fix json bug for python35 in tests | Fix json bug for python35 in tests
| Python | mit | frostidaho/dynmen | ---
+++
@@ -12,7 +12,7 @@
def __init__(self, *args, **kwargs):
dfiles = [_os.path.join(_thisdir, x) for x in self._data_files]
for fpath in dfiles:
- with open(fpath, 'rb') as fdata:
+ with open(fpath, 'rt') as fdata:
dat = _json.load(fdata)
for k,v in dat.items():
setattr(self, k, v) |
52fdf6c0183233d34bc987e27a5e727b71ba09f0 | src/main/translator-xml/PMLToXML.py | src/main/translator-xml/PMLToXML.py | #!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
| #!/usr/bin/env/python
import sys
from os.path import isdir, split, abspath
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = abspath(pml_file.name)
xml_path = abspath(xml_file)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def valid_xml_path(xml_path):
path, _ = split(xml_path)
return isdir(path)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=str, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
if valid_xml_path(abspath(args.xml)):
translate_pml_file(args.xml, args.pml)
else:
print "XML path is invalid."
sys.exit(1)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
| Change arg from file to path | Change arg from file to path
| Python | mit | CS4098/GroupProject,CS4098/GroupProject,CS4098/GroupProject | ---
+++
@@ -1,15 +1,15 @@
#!/usr/bin/env/python
import sys
-import os.path
+from os.path import isdir, split, abspath
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
- pml_path = os.path.abspath(pml_file.name)
- xml_path = os.path.abspath(xml_file.name)
+ pml_path = abspath(pml_file.name)
+ xml_path = abspath(xml_file)
# Call XML generator
# TODO: Remove abs-path
@@ -19,15 +19,24 @@
sys.exit(1)
+def valid_xml_path(xml_path):
+ path, _ = split(xml_path)
+ return isdir(path)
+
+
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
- parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
+ parser.add_argument('-x', '--xml', required=True, type=str, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
- translate_pml_file(args.xml, args.pml)
+ if valid_xml_path(abspath(args.xml)):
+ translate_pml_file(args.xml, args.pml)
+ else:
+ print "XML path is invalid."
+ sys.exit(1)
except IOError, msg:
parser.error(str(msg))
|
a5a90924822754b483041ba29cefeba949e72f38 | securesystemslib/gpg/exceptions.py | securesystemslib/gpg/exceptions.py | """
<Program Name>
exceptions.py
<Author>
Santiago Torres-Arias <santiago@nyu.edu>
Lukas Puehringer <lukas.puehringer@nyu.edu>
<Started>
Dec 8, 2017
<Copyright>
See LICENSE for licensing information.
<Purpose>
Define Exceptions used in the gpg package. Following the practice from
securesystemslib the names chosen for exception classes should end in
'Error' (except where there is a good reason not to).
"""
class PacketParsingError(Exception):
pass
| """
<Program Name>
exceptions.py
<Author>
Santiago Torres-Arias <santiago@nyu.edu>
Lukas Puehringer <lukas.puehringer@nyu.edu>
<Started>
Dec 8, 2017
<Copyright>
See LICENSE for licensing information.
<Purpose>
Define Exceptions used in the gpg package. Following the practice from
securesystemslib the names chosen for exception classes should end in
'Error' (except where there is a good reason not to).
"""
class PacketParsingError(Exception):
pass
class KeyNotFoundError(Exception):
pass
| Add custom KeyNotFoundError error to gpg module | Add custom KeyNotFoundError error to gpg module
| Python | mit | secure-systems-lab/securesystemslib,secure-systems-lab/securesystemslib | ---
+++
@@ -20,3 +20,6 @@
"""
class PacketParsingError(Exception):
pass
+
+class KeyNotFoundError(Exception):
+ pass |
dcbcd7434b8b4199242a479d187d2b833ca6ffcc | polling_stations/settings/constants/councils.py | polling_stations/settings/constants/councils.py | # settings for councils scraper
YVM_LA_URL = "https://www.yourvotematters.co.uk/_design/nested-content/results-page2/search-voting-locations-by-districtcode?queries_distcode_query=" # noqa
BOUNDARIES_URL = "https://ons-cache.s3.amazonaws.com/Local_Authority_Districts_April_2019_Boundaries_UK_BFE.geojson"
EC_COUNCIL_CONTACT_DETAILS_API_URL = ""
OLD_TO_NEW_MAP = {}
NEW_COUNCILS = []
| # settings for councils scraper
YVM_LA_URL = "https://www.yourvotematters.co.uk/_design/nested-content/results-page2/search-voting-locations-by-districtcode?queries_distcode_query=" # noqa
BOUNDARIES_URL = "https://ons-cache.s3.amazonaws.com/Local_Authority_Districts_April_2019_Boundaries_UK_BFE.geojson"
EC_COUNCIL_CONTACT_DETAILS_API_URL = (
"https://electoralcommission.org.uk/api/v1/data/local-authorities.json"
)
OLD_TO_NEW_MAP = {}
NEW_COUNCILS = []
| Set the EC API URL | Set the EC API URL
| Python | bsd-3-clause | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | ---
+++
@@ -2,7 +2,9 @@
YVM_LA_URL = "https://www.yourvotematters.co.uk/_design/nested-content/results-page2/search-voting-locations-by-districtcode?queries_distcode_query=" # noqa
BOUNDARIES_URL = "https://ons-cache.s3.amazonaws.com/Local_Authority_Districts_April_2019_Boundaries_UK_BFE.geojson"
-EC_COUNCIL_CONTACT_DETAILS_API_URL = ""
+EC_COUNCIL_CONTACT_DETAILS_API_URL = (
+ "https://electoralcommission.org.uk/api/v1/data/local-authorities.json"
+)
OLD_TO_NEW_MAP = {} |
4e31e5c776c40997cccd76d4ce592d7f3d5de752 | example/runner.py | example/runner.py | #!/usr/bin/python
import argparse
import sys
def args():
parser = argparse.ArgumentParser(description='Run the Furious Examples.')
parser.add_argument('--gae-sdk-path', metavar='S', dest="gae_lib_path",
default="/usr/local/google_appengine",
help='path to the GAE SDK')
parser.add_argument('--url', metavar='U', dest="url", default="",
help="the endpoint to run")
return parser.parse_args()
def setup(options):
sys.path.insert(0, options.gae_lib_path)
from dev_appserver import fix_sys_path
fix_sys_path()
def run(options):
from google.appengine.tools import appengine_rpc
from google.appengine.tools import appcfg
source = 'furious'
user_agent = appcfg.GetUserAgent()
server = appengine_rpc.HttpRpcServer(
'localhost:8080', lambda: ('test@example.com', 'password'), user_agent,
source, secure=False)
server._DevAppServerAuthenticate()
server.Send(options.url, content_type="text/html; charset=utf-8",
payload=None)
def main():
options = args()
setup(options)
run(options)
if __name__ == "__main__":
main()
| #!/usr/bin/python
import argparse
import sys
def args():
parser = argparse.ArgumentParser(description='Run the Furious Examples.')
parser.add_argument('--gae-sdk-path', metavar='S', dest="gae_lib_path",
default="/usr/local/google_appengine",
help='path to the GAE SDK')
parser.add_argument('url', metavar='U', default="", nargs=1,
help="the endpoint to run")
return parser.parse_args()
def setup(options):
sys.path.insert(0, options.gae_lib_path)
from dev_appserver import fix_sys_path
fix_sys_path()
def run(options):
from google.appengine.tools import appengine_rpc
from google.appengine.tools import appcfg
source = 'furious'
user_agent = appcfg.GetUserAgent()
server = appengine_rpc.HttpRpcServer(
'localhost:8080', lambda: ('test@example.com', 'password'), user_agent,
source, secure=False)
url = "/"
if options.url:
url += options.url[0]
server._DevAppServerAuthenticate()
server.Send(url, content_type="text/html; charset=utf-8",
payload=None)
def main():
options = args()
setup(options)
run(options)
if __name__ == "__main__":
main()
| Update the way the url is handled. | Update the way the url is handled.
| Python | apache-2.0 | andreleblanc-wf/furious,Workiva/furious,rosshendrickson-wf/furious,beaulyddon-wf/furious,mattsanders-wf/furious,rosshendrickson-wf/furious,mattsanders-wf/furious,beaulyddon-wf/furious,andreleblanc-wf/furious,Workiva/furious | ---
+++
@@ -11,7 +11,7 @@
default="/usr/local/google_appengine",
help='path to the GAE SDK')
- parser.add_argument('--url', metavar='U', dest="url", default="",
+ parser.add_argument('url', metavar='U', default="", nargs=1,
help="the endpoint to run")
return parser.parse_args()
@@ -34,8 +34,12 @@
'localhost:8080', lambda: ('test@example.com', 'password'), user_agent,
source, secure=False)
+ url = "/"
+ if options.url:
+ url += options.url[0]
+
server._DevAppServerAuthenticate()
- server.Send(options.url, content_type="text/html; charset=utf-8",
+ server.Send(url, content_type="text/html; charset=utf-8",
payload=None)
|
24cbbd24e6398aa11956ac48282bd907806284c3 | genderbot.py | genderbot.py | import re
from twitterbot import TwitterBot
import wikipedia
class Genderbot(TwitterBot):
boring_article_regex = (r"municipality|village|town|football|genus|family|"
"administrative|district|community|region|hamlet|"
"school|actor|mountain|basketball|city|species|film|"
"county|located|politician|professional|settlement")
def tweet(self):
article = self.__random_wikipedia_article()
match = re.search(r"\bis [^.?]+", article.content, re.UNICODE)
if match:
status = self.__format_status(match.group(0), article.url)
if self.__is_interesting(status):
self.post_tweet(status)
def __format_status(self, is_phrase, url):
status = 'gender %s' % (is_phrase)
if len(status) > 114: status = status[0:113] + '...'
return status + ' %s' % (url)
def __is_interesting(self, status):
boring_match = re.search(Genderbot.boring_article_regex, status, re.UNICODE)
return boring_match is None
def __random_wikipedia_article(self):
random_title = wikipedia.random(pages=1)
return wikipedia.page(title=random_title)
if __name__ == "__main__":
try:
Genderbot("CustomGender").tweet()
except:
pass
| import re
from twitterbot import TwitterBot
import wikipedia
class Genderbot(TwitterBot):
boring_regex = (r"municipality|village|town|football|genus|family|"
"administrative|district|community|region|hamlet|"
"school|actor|mountain|basketball|city|species|film|"
"county|located|politician|professional|settlement|"
"river|lake|province|replaced|origin|band|park|song"
"approximately|north|south|east|west|business")
def tweet(self):
article = self.__random_wikipedia_article()
match = re.search(r"\bis [^.?]+", article.content, re.UNICODE)
if match:
status = self.__format_status(match.group(0), article.url)
if self.__is_interesting(status):
self.post_tweet(status)
def __format_status(self, is_phrase, url):
status = 'gender %s' % (is_phrase)
if len(status) > 114: status = status[0:113] + '...'
return status + ' %s' % (url)
def __is_interesting(self, status):
flags = re.UNICODE | re.IGNORECASE
boring = re.search(Genderbot.boring_regex, status, flags)
return boring is None
def __random_wikipedia_article(self):
random_title = wikipedia.random(pages=1)
return wikipedia.page(title=random_title)
if __name__ == "__main__":
try:
Genderbot("CustomGender").tweet()
except:
pass
| Tweak boring regex to exclude more terms | Tweak boring regex to exclude more terms
| Python | mit | DanielleSucher/genderbot | ---
+++
@@ -4,10 +4,12 @@
class Genderbot(TwitterBot):
- boring_article_regex = (r"municipality|village|town|football|genus|family|"
- "administrative|district|community|region|hamlet|"
- "school|actor|mountain|basketball|city|species|film|"
- "county|located|politician|professional|settlement")
+ boring_regex = (r"municipality|village|town|football|genus|family|"
+ "administrative|district|community|region|hamlet|"
+ "school|actor|mountain|basketball|city|species|film|"
+ "county|located|politician|professional|settlement|"
+ "river|lake|province|replaced|origin|band|park|song"
+ "approximately|north|south|east|west|business")
def tweet(self):
article = self.__random_wikipedia_article()
@@ -23,8 +25,9 @@
return status + ' %s' % (url)
def __is_interesting(self, status):
- boring_match = re.search(Genderbot.boring_article_regex, status, re.UNICODE)
- return boring_match is None
+ flags = re.UNICODE | re.IGNORECASE
+ boring = re.search(Genderbot.boring_regex, status, flags)
+ return boring is None
def __random_wikipedia_article(self):
random_title = wikipedia.random(pages=1) |
f68a3874eb9b80898a6c1acfc74e493aad5817d8 | source/services/rotten_tomatoes_service.py | source/services/rotten_tomatoes_service.py | import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
__URL = 'http://www.rottentomatoes.com/m/'
__SEPERATOR = '_'
def __init__(self, title):
self.title = title
def get_rt_rating(self):
search_url = self.__URL + self.format_title()
movie_page = requests.get(search_url)
contents = movie_page.text
soup = BeautifulSoup(contents, 'lxml')
ratings = self.get_ratings(soup)
ratings.link = search_url
return ratings
def format_title(self):
formatted_title = self.title
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
formatted_title = formatted_title.replace(':', '')
formatted_title = formatted_title.replace(',', '')
return formatted_title
def get_ratings(self, soup):
items = []
for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
items.append(item.get_text().strip('%'))
return RTRating(items)
| import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
    """Scrapes Rotten Tomatoes for a movie's ratings.

    The movie page URL is guessed by normalising the title into the
    ``/m/<slug>`` form that rottentomatoes.com uses.
    """

    __URL = 'http://www.rottentomatoes.com/m/'
    # Word separator used in RT URL slugs (identifier spelling kept as-is).
    __SEPERATOR = '_'

    def __init__(self, title):
        # Raw, human-readable movie title, e.g. "The Matrix".
        self.title = title

    def get_rt_rating(self):
        """Fetch the movie page and return an RTRating with its link set.

        NOTE(review): no HTTP error handling — a 404 or redirect page
        will simply yield an RTRating built from whatever was scraped.
        """
        search_url = self.__URL + self.format_title()
        movie_page = requests.get(search_url)
        contents = movie_page.text
        soup = BeautifulSoup(contents, 'lxml')
        ratings = self.get_ratings(soup)
        ratings.link = search_url
        return ratings

    def format_title(self):
        """Return the title normalised into an RT URL slug.

        Leading articles ("The ", "A ") are dropped, possessives are
        collapsed ("'s" -> "s"), spaces become separators, and
        slug-hostile punctuation is removed.
        """
        formatted_title = self.title
        if formatted_title.startswith('The '):
            formatted_title = formatted_title.replace('The ', '', 1)
        if formatted_title.startswith('A '):
            formatted_title = formatted_title.replace('A ', '', 1)
        if "'s" in formatted_title:
            formatted_title = formatted_title.replace("'s", 's')
        formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
        formatted_title = formatted_title.replace('-', '')
        formatted_title = formatted_title.replace(':', '')
        formatted_title = formatted_title.replace(',', '')
        return formatted_title

    def get_ratings(self, soup):
        """Collect every itemprop="ratingValue" percentage into an RTRating."""
        items = []
        for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
            items.append(item.get_text().strip('%'))
        return RTRating(items)
| Remove "A" from start of title for RT search | Remove "A" from start of title for RT search
| Python | mit | jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu | ---
+++
@@ -29,6 +29,8 @@
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
+ if formatted_title.startswith('A '):
+ formatted_title = formatted_title.replace('A ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
|
033b17a8be5be32188ca9b5f286fe023fc07d34a | frappe/utils/pdf.py | frappe/utils/pdf.py | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import pdfkit, os, frappe
from frappe.utils import scrub_urls
def get_pdf(html, options=None):
if not options:
options = {}
options.update({
"print-media-type": None,
"background": None,
"images": None,
'margin-top': '15mm',
'margin-right': '15mm',
'margin-bottom': '15mm',
'margin-left': '15mm',
'encoding': "UTF-8",
'no-outline': None
})
if not options.get("page-size"):
options['page-size'] = frappe.db.get_single_value("Print Settings", "pdf_page_size") or "A4"
html = scrub_urls(html)
fname = os.path.join("/tmp", frappe.generate_hash() + ".pdf")
pdfkit.from_string(html, fname, options=options or {})
with open(fname, "rb") as fileobj:
filedata = fileobj.read()
os.remove(fname)
return filedata
| # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import pdfkit, os, frappe
from frappe.utils import scrub_urls
def get_pdf(html, options=None):
    """Render *html* to PDF via pdfkit/wkhtmltopdf and return the bytes.

    :param html: HTML document to render; its URLs are scrubbed first.
    :param options: optional dict of extra wkhtmltopdf flags; mutated in
        place with the defaults below.
    :return: the generated PDF file contents as a byte string.
    """
    if not options:
        options = {}

    options.update({
        "print-media-type": None,
        "background": None,
        "images": None,
        # Margins intentionally left to wkhtmltopdf's defaults (previous
        # explicit values kept here, commented, for reference).
        # 'margin-top': '10mm',
        # 'margin-right': '1mm',
        # 'margin-bottom': '10mm',
        # 'margin-left': '1mm',
        'encoding': "UTF-8",
        'no-outline': None
    })

    if not options.get("page-size"):
        options['page-size'] = frappe.db.get_single_value("Print Settings", "pdf_page_size") or "A4"

    html = scrub_urls(html)
    # Render into a uniquely-named temp file, read it back, then delete it.
    fname = os.path.join("/tmp", frappe.generate_hash() + ".pdf")
    pdfkit.from_string(html, fname, options=options or {})

    with open(fname, "rb") as fileobj:
        filedata = fileobj.read()
    os.remove(fname)

    return filedata
| Remove margin constrains from PDF printing | Remove margin constrains from PDF printing
| Python | mit | BhupeshGupta/frappe,BhupeshGupta/frappe,BhupeshGupta/frappe,BhupeshGupta/frappe | ---
+++
@@ -13,10 +13,10 @@
"print-media-type": None,
"background": None,
"images": None,
- 'margin-top': '15mm',
- 'margin-right': '15mm',
- 'margin-bottom': '15mm',
- 'margin-left': '15mm',
+ # 'margin-top': '10mm',
+ # 'margin-right': '1mm',
+ # 'margin-bottom': '10mm',
+ # 'margin-left': '1mm',
'encoding': "UTF-8",
'no-outline': None
}) |
026c22ef25b28889e24f20f96ca6285289bcd46d | seven23/models/stats/middleware.py | seven23/models/stats/middleware.py |
from datetime import datetime
from seven23.models.profile.models import Profile
from rest_framework.authentication import TokenAuthentication
from seven23.models.stats.models import MonthlyActiveUser, DailyActiveUser
def active_user_middleware(get_response):
def middleware(request):
user = request.user
user_auth_tuple = TokenAuthentication().authenticate(request)
if user_auth_tuple is not None:
(user, token) = user_auth_tuple
if user.is_authenticated and not user.is_superuser:
# If user has no profile, we create on.
if not hasattr(user, 'profile'):
Profile.objects.create(user=user)
now = datetime.now()
last_api_call = user.profile.last_api_call
udpate_user = False
if now.year != last_api_call.year or now.month != last_api_call.month :
MonthlyActiveUser.objects.update_or_create(year=now.year, month=now.month)
udpate_user = True
if now.year != last_api_call.year or now.month != last_api_call.month or now.day != last_api_call.day :
DailyActiveUser.objects.update_or_create(year=now.year, month=now.month, day=now.day)
udpate_user = True
if udpate_user:
user.profile.last_api_call = now
user.save()
# Perform actual request
response = get_response(request)
return response
return middleware |
from datetime import datetime
from seven23.models.profile.models import Profile
from rest_framework.authentication import TokenAuthentication
# from seven23.models.stats.models import MonthlyActiveUser, DailyActiveUser
def active_user_middleware(get_response):
    """Django middleware factory that tracks per-user API activity.

    For every token-authenticated, non-superuser request it ensures the
    user has a Profile and refreshes ``profile.last_api_call`` at most
    once per calendar day.  Aggregate active-user stats recording is
    currently disabled (see the commented-out lines below).
    """
    def middleware(request):
        user = request.user
        # Session user may be anonymous; fall back to DRF token auth.
        user_auth_tuple = TokenAuthentication().authenticate(request)
        if user_auth_tuple is not None:
            (user, token) = user_auth_tuple

        if user.is_authenticated and not user.is_superuser:
            # If user has no profile, we create one.
            if not hasattr(user, 'profile'):
                Profile.objects.create(user=user)

            now = datetime.now()
            last_api_call = user.profile.last_api_call

            udpate_user = False  # NOTE(review): typo for "update_user"
            # Month rollover since the last recorded call.
            if now.year != last_api_call.year or now.month != last_api_call.month :
                # MonthlyActiveUser.objects.update_or_create(year=now.year, month=now.month)
                udpate_user = True

            # Day rollover since the last recorded call.
            if now.year != last_api_call.year or now.month != last_api_call.month or now.day != last_api_call.day :
                # DailyActiveUser.objects.update_or_create(year=now.year, month=now.month, day=now.day)
                udpate_user = True

            if udpate_user:
                user.profile.last_api_call = now
                user.save()

        # Perform actual request
        response = get_response(request)

        return response

    return middleware
| Python | mit | sebastienbarbier/723e,sebastienbarbier/723e_server,sebastienbarbier/723e_server,sebastienbarbier/723e | ---
+++
@@ -2,7 +2,7 @@
from datetime import datetime
from seven23.models.profile.models import Profile
from rest_framework.authentication import TokenAuthentication
-from seven23.models.stats.models import MonthlyActiveUser, DailyActiveUser
+# from seven23.models.stats.models import MonthlyActiveUser, DailyActiveUser
def active_user_middleware(get_response):
@@ -24,11 +24,11 @@
udpate_user = False
if now.year != last_api_call.year or now.month != last_api_call.month :
- MonthlyActiveUser.objects.update_or_create(year=now.year, month=now.month)
+ # MonthlyActiveUser.objects.update_or_create(year=now.year, month=now.month)
udpate_user = True
if now.year != last_api_call.year or now.month != last_api_call.month or now.day != last_api_call.day :
- DailyActiveUser.objects.update_or_create(year=now.year, month=now.month, day=now.day)
+ # DailyActiveUser.objects.update_or_create(year=now.year, month=now.month, day=now.day)
udpate_user = True
if udpate_user: |
500859e22bd4fda1fe55f4375642ccd5c1186d44 | d_parser/helpers/parser_extender.py | d_parser/helpers/parser_extender.py | import logging
from d_parser.helpers import url_lib
from d_parser.helpers.get_body import get_body
from helpers.config import Config
logger = logging.getLogger('ddd_site_parse')
def check_body_errors(self, grab, task):
try:
self.status_counter[str(grab.doc.code)] += 1
except KeyError:
self.status_counter[str(grab.doc.code)] = 1
if grab.doc.body == '' or grab.doc.code != 200:
err = '[{}] Code is {}, url is {}, body is {}'.format(task.name, grab.doc.code, task.url, grab.doc.body)
logger.error(err)
return True
return False
def process_error(self, grab, task, exception):
try:
self.status_counter['EXC'] += 1
except KeyError:
self.status_counter['EXC'] = 1
html = get_body(grab)
err = '[{}] Url {} parse failed (e: {}), debug: {}'.format(task.name, task.url, exception, html)
self.logger.error(err)
def common_init(self, writer, try_limit):
self.logger = logger
self.result = writer
self.status_counter = {}
self.cookie_jar = {}
self.err_limit = try_limit
self.domain = url_lib.get_host_from_url(Config.get_seq('SITE_URL')[0])
self.logger.info('Init parser ok...') | import logging
from d_parser.helpers import url_lib
from d_parser.helpers.get_body import get_body
from helpers.config import Config
logger = logging.getLogger('ddd_site_parse')
def check_body_errors(self, grab, task):
    """Record the HTTP status in ``self.status_counter`` and flag failures.

    Returns True (after logging) when the response body is empty or the
    status code is not 200; returns False for a healthy response.
    """
    code_key = str(grab.doc.code)
    self.status_counter[code_key] = self.status_counter.get(code_key, 0) + 1

    if grab.doc.body != '' and grab.doc.code == 200:
        return False

    logger.error('[{}] Code is {}, url is {}, body is {}'.format(
        task.name, grab.doc.code, task.url, grab.doc.body))
    return True
def process_error(self, grab, task, exception):
    """Count an exception occurrence under the 'EXC' key and log it.

    The full response HTML is included in the log message only when the
    APP_LOG_HTML_ERR config value is the string 'True'; otherwise a
    placeholder is logged to keep logs small.
    """
    try:
        self.status_counter['EXC'] += 1
    except KeyError:
        self.status_counter['EXC'] = 1

    if Config.get('APP_LOG_HTML_ERR', '') == 'True':
        html = get_body(grab)
    else:
        html = '(skipped by config)'

    self.logger.error('[{}] Url {} parse failed (e: {}), debug: {}'.format(task.name, task.url, exception, html))
def common_init(self, try_limit):
    """Initialise shared parser state (intended to be bound to a parser).

    :param try_limit: maximum retry attempts before a task is abandoned.
    """
    self.result = []
    self.logger = logger
    self.status_counter = {}  # per-HTTP-status (and 'EXC') counters
    self.cookie_jar = {}
    self.err_limit = try_limit
    # Host part of the first configured SITE_URL entry.
    self.domain = url_lib.get_host_from_url(Config.get_seq('SITE_URL')[0])

    self.logger.info('Init parser ok...')
| Add skip html log output config key | Add skip html log output config key
| Python | mit | Holovin/D_GrabDemo | ---
+++
@@ -29,14 +29,17 @@
except KeyError:
self.status_counter['EXC'] = 1
- html = get_body(grab)
- err = '[{}] Url {} parse failed (e: {}), debug: {}'.format(task.name, task.url, exception, html)
- self.logger.error(err)
+ if Config.get('APP_LOG_HTML_ERR', '') == 'True':
+ html = get_body(grab)
+ else:
+ html = '(skipped by config)'
+
+ self.logger.error('[{}] Url {} parse failed (e: {}), debug: {}'.format(task.name, task.url, exception, html))
-def common_init(self, writer, try_limit):
+def common_init(self, try_limit):
+ self.result = []
self.logger = logger
- self.result = writer
self.status_counter = {}
self.cookie_jar = {}
self.err_limit = try_limit |
2f2cef54a98e2328a638d9bbdfd2e0312606d906 | plugins/GCodeWriter/__init__.py | plugins/GCodeWriter/__init__.py | # Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "mesh_writer",
"plugin": {
"name": "GCode Writer",
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("GCode Writer Plugin Description", "Writes GCode to a file")
},
"mesh_writer": {
"extension": "gcode",
"description": catalog.i18nc("GCode Writer File Description", "GCode File")
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
| # Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
    """Return the plugin metadata Cura uses to register this mesh writer.

    The mesh_writer entry advertises the .gcode extension and the
    text/x-gcode MIME type so this writer is selected for g-code output.
    """
    return {
        "type": "mesh_writer",
        "plugin": {
            "name": "GCode Writer",
            "author": "Ultimaker",
            "version": "1.0",
            "description": catalog.i18nc("GCode Writer Plugin Description", "Writes GCode to a file")
        },

        "mesh_writer": {
            "extension": "gcode",
            "description": catalog.i18nc("GCode Writer File Description", "GCode File"),
            "mime_types": [
                "text/x-gcode"
            ]
        }
    }
def register(app):
    """Plugin entry point: hand Cura a fresh GCodeWriter instance."""
    return { "mesh_writer": GCodeWriter.GCodeWriter() }
| Add mime types to GCodeWriter plugin | Add mime types to GCodeWriter plugin
| Python | agpl-3.0 | Curahelper/Cura,senttech/Cura,fieldOfView/Cura,hmflash/Cura,lo0ol/Ultimaker-Cura,Curahelper/Cura,markwal/Cura,ad1217/Cura,senttech/Cura,ad1217/Cura,lo0ol/Ultimaker-Cura,totalretribution/Cura,hmflash/Cura,bq/Ultimaker-Cura,ynotstartups/Wanhao,fieldOfView/Cura,totalretribution/Cura,fxtentacle/Cura,fxtentacle/Cura,ynotstartups/Wanhao,markwal/Cura,bq/Ultimaker-Cura | ---
+++
@@ -18,7 +18,10 @@
"mesh_writer": {
"extension": "gcode",
- "description": catalog.i18nc("GCode Writer File Description", "GCode File")
+ "description": catalog.i18nc("GCode Writer File Description", "GCode File"),
+ "mime_types": [
+ "text/x-gcode"
+ ]
}
}
|
87a72b219c2699e6bbb4354ae4b4f4ee356fd2c5 | plumeria/plugins/bing_images.py | plumeria/plugins/bing_images.py | from aiohttp import BasicAuth
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "i", category="Search")
@rate_limit()
async def image(message):
"""
Search Bing for an image.
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
| from aiohttp import BasicAuth
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
    """
    Search Bing for an image.

    Returns the MediaUrl of the first result; raises CommandError when
    the query is empty or no results were found.
    """
    # Fix: the alias list contained a stray double quote (""i") which is
    # a syntax error; restored to the plain "i" alias alongside the new
    # "images" alias.
    q = message.content.strip()
    if not q:
        raise CommandError("Search term required!")
    r = await http.get(SEARCH_URL, params=[
        ('$format', 'json'),
        ('$top', '10'),
        ('Query', "'{}'".format(q)),
    ], auth=BasicAuth("", password=api_key()))
    data = r.json()['d']
    if len(data['results']):
        return Response(data['results'][0]['MediaUrl'])
    else:
        raise CommandError("no results found")
| Add !images as Bing !image alias. | Add !images as Bing !image alias.
| Python | mit | sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria | ---
+++
@@ -12,7 +12,7 @@
comment="An API key from Bing")
-@commands.register("image", "i", category="Search")
+@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
""" |
2fbc2522f473c5255e678d435689aead9116d2d3 | sieve/sieve.py | sieve/sieve.py | def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
def sieve(n):
    """Sieve of Eratosthenes: all primes up to and including n, ascending."""
    if n < 2:
        return []

    composites = set()
    primes = [2]
    # Even numbers above 2 are never prime, so only odd candidates are scanned.
    for candidate in range(3, n + 1, 2):
        if candidate in composites:
            continue
        primes.append(candidate)
        # Mark odd multiples from candidate**2 upward; smaller multiples
        # were already covered by smaller primes.
        composites.update(range(candidate * candidate, n + 1, candidate))
    return primes
| Fix bug where n is the square of a prime | Fix bug where n is the square of a prime
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | ---
+++
@@ -6,5 +6,5 @@
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
- not_prime.update(range(i*i, n, i))
+ not_prime.update(range(i*i, n+1, i))
return prime |
4f7371dad85843c42b9cb427edebe5020586b61e | server/core/management/commands/poll_urls.py | server/core/management/commands/poll_urls.py | import datetime
import multiprocessing
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from core.models import WebPage, PageScrapeResult
from core.views import scrape_url
class Command(BaseCommand):
help = "Poll all the urls and scrape the results"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count() + 2)
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = float(int(now.strftime("%s")) - now.second)
mins_passed = curr_time - today
for page in WebPage.objects.all():
if mins_passed % page.interval == 0 or settings.DEBUG:
pool.apply_async(scrape_url, (page, ))
pool.close()
pool.join()
| import datetime
import multiprocessing
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from core.models import WebPage, PageScrapeResult
from core.views import scrape_url
class Command(BaseCommand):
    """Management command: scrape every WebPage whose interval is due.

    Pages are scraped in parallel with one worker process per CPU core.
    """
    help = "Poll all the urls and scrape the results"
    can_import_settings = True

    def handle(self, *args, **options):
        pool = multiprocessing.Pool(multiprocessing.cpu_count())
        # Epoch timestamp of local midnight today.
        today = int(datetime.date.today().strftime("%s"))
        now = datetime.datetime.now()
        # Epoch "now", truncated to the start of the current minute.
        curr_time = float(int(now.strftime("%s")) - now.second)
        # NOTE(review): strftime("%s") yields epoch *seconds*, so despite
        # the name this is seconds since midnight — confirm the units of
        # page.interval match.
        mins_passed = curr_time - today
        for page in WebPage.objects.all():
            # In DEBUG every page is scraped on every run.
            if mins_passed % page.interval == 0 or settings.DEBUG:
                pool.apply_async(scrape_url, (page, ))
        pool.close()
        pool.join()
| Reduce the number of processes used. | Reduce the number of processes used.
| Python | mit | theju/atifier,theju/atifier | ---
+++
@@ -13,7 +13,7 @@
can_import_settings = True
def handle(self, *args, **options):
- pool = multiprocessing.Pool(multiprocessing.cpu_count() + 2)
+ pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = float(int(now.strftime("%s")) - now.second) |
e8c6be3565bd8b33dfb7a01dfb77938534ce9d09 | pysswords/crypt.py | pysswords/crypt.py | import os
import gnupg
import logging
from .utils import which
def create_key_input(gpg, passphrase, testing=False):
key_input = gpg.gen_key_input(
name_real='Pysswords',
name_email='pysswords@pysswords',
name_comment='Autogenerated by Pysswords',
passphrase=passphrase,
testing=testing
)
return key_input
def create_gpg(binary, database_path, passphrase):
gnupg_path = os.path.join(database_path, ".gnupg")
gpg = gnupg.GPG(which(binary), homedir=gnupg_path)
gpg.gen_key(create_key_input(gpg, passphrase))
return gpg
| import os
import gnupg
import logging
from .utils import which
def create_key_input(gpg, passphrase, testing=False):
    """Build the gen_key input block for a new Pysswords GPG key.

    :param gpg: a gnupg.GPG instance used to format the input.
    :param passphrase: passphrase protecting the generated key.
    :param testing: forwarded to gen_key_input (gnupg test mode).
    """
    key_input = gpg.gen_key_input(
        name_real='Pysswords',
        name_email='pysswords@pysswords',
        name_comment='Autogenerated by Pysswords',
        passphrase=passphrase,
        testing=testing
    )
    return key_input
def create_gpg(binary, database_path, passphrase):
    """Create a GPG home under the database and generate the key.

    The gnupg home directory is ``<database_path>/.gnupg``.
    """
    gnupg_path = os.path.join(database_path, ".gnupg")
    gpg = gnupg.GPG(which(binary), homedir=gnupg_path)
    gpg.gen_key(create_key_input(gpg, passphrase))
    return gpg
def load_gpg(binary, database_path):
    """Return a GPG instance for an existing database (no key generation)."""
    gnupg_path = os.path.join(database_path, ".gnupg")
    gpg = gnupg.GPG(which(binary), homedir=gnupg_path)
    return gpg
| Add load gpg to get an instance of gpg | Add load gpg to get an instance of gpg
| Python | mit | scorphus/passpie,scorphus/passpie,marcwebbie/passpie,eiginn/passpie,marcwebbie/pysswords,marcwebbie/passpie,eiginn/passpie | ---
+++
@@ -21,3 +21,9 @@
gpg = gnupg.GPG(which(binary), homedir=gnupg_path)
gpg.gen_key(create_key_input(gpg, passphrase))
return gpg
+
+
+def load_gpg(binary, database_path):
+ gnupg_path = os.path.join(database_path, ".gnupg")
+ gpg = gnupg.GPG(which(binary), homedir=gnupg_path)
+ return gpg |
044d2e1761a6330dad326470728ea4fadceef8d8 | PropertyVerification/ContractDebugger.py | PropertyVerification/ContractDebugger.py | class ContractDebugger:
def __init__(self, pathCondGen):
self.pathCondGen = pathCondGen
def explain_failures(self, contract_name, contract, success_pcs, failed_pcs):
print("Explaining why contract fails: " + contract_name)
print(success_pcs)
print(failed_pcs)
self.get_rule_differences(success_pcs, failed_pcs)
def get_rule_differences(self, success_pcs, failed_pcs):
rules_in_success = self.get_rules(success_pcs)
rules_in_failed = self.get_rules(failed_pcs)
good_rules = [rule for rule in rules_in_success if not rule in rules_in_failed]
bad_rules = [rule for rule in rules_in_failed if not rule in rules_in_success]
print("Good rules: (Rules in success set and not failure set)")
print(good_rules)
print("Bad rules: (Rules in failure set and not success set)")
print(bad_rules)
def get_rules(self, pcs):
rules = []
for pc in pcs:
r = self.pathCondGen.rules_in_pc_real_name(pc)
print(r)
return rules
class ContractDebugger:
    """Explains contract failures by diffing the transformation rules
    present in succeeding vs. failing path conditions."""

    def __init__(self, pathCondGen):
        # Path-condition generator; must expose rules_in_pc_real_name(pc).
        self.pathCondGen = pathCondGen

    def explain_failures(self, contract_name, contract, success_pcs, failed_pcs):
        """Print which rules are unique to the success / failure PC sets."""
        print("Explaining why contract fails: " + contract_name)

        # print("Success PCs: ")
        # print(success_pcs)

        # print("Failed PCs: ")
        # print(failed_pcs)

        self.get_rule_differences(success_pcs, failed_pcs)

    def get_rule_differences(self, success_pcs, failed_pcs):
        """Print the rules exclusive to each set, sorted for stable output."""
        rules_in_success = self.get_rules(success_pcs)
        rules_in_failed = self.get_rules(failed_pcs)

        good_rules = sorted([rule for rule in rules_in_success if not rule in rules_in_failed])
        bad_rules = sorted([rule for rule in rules_in_failed if not rule in rules_in_success])

        print("Good rules: (Rules in success set and not failure set)")
        print(good_rules)

        print("Bad rules: (Rules in failure set and not success set)")
        print(bad_rules)

    def get_rules(self, pcs):
        """Return the deduplicated rule names over all path conditions."""
        rules = []
        for pc in pcs:
            rules += self.pathCondGen.rules_in_pc_real_name(pc)

        # Deduplicate; note the resulting order is unspecified (set-based).
        return list(set(rules))
| Print rule difference between good and bad sets. | Print rule difference between good and bad sets.
| Python | mit | levilucio/SyVOLT,levilucio/SyVOLT | ---
+++
@@ -7,8 +7,11 @@
print("Explaining why contract fails: " + contract_name)
- print(success_pcs)
- print(failed_pcs)
+ # print("Success PCs: ")
+ # print(success_pcs)
+
+ # print("Failed PCs: ")
+ # print(failed_pcs)
self.get_rule_differences(success_pcs, failed_pcs)
@@ -17,8 +20,8 @@
rules_in_success = self.get_rules(success_pcs)
rules_in_failed = self.get_rules(failed_pcs)
- good_rules = [rule for rule in rules_in_success if not rule in rules_in_failed]
- bad_rules = [rule for rule in rules_in_failed if not rule in rules_in_success]
+ good_rules = sorted([rule for rule in rules_in_success if not rule in rules_in_failed])
+ bad_rules = sorted([rule for rule in rules_in_failed if not rule in rules_in_success])
print("Good rules: (Rules in success set and not failure set)")
print(good_rules)
@@ -30,7 +33,6 @@
rules = []
for pc in pcs:
- r = self.pathCondGen.rules_in_pc_real_name(pc)
- print(r)
+ rules += self.pathCondGen.rules_in_pc_real_name(pc)
- return rules
+ return list(set(rules)) |
04bbe400396a5ef5b930b9db9d8d8e30ff6bf678 | medical_patient_ethnicity/models/medical_patient_ethnicity.py | medical_patient_ethnicity/models/medical_patient_ethnicity.py | # -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char(required=True, )
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
| # -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
    """Lookup table of patient ethnicities, unique by name and by code."""
    _name = 'medical.patient.ethnicity'
    _description = 'Medical Patient Ethnicity'

    # Free-form notes about the ethnicity entry.
    notes = fields.Char()
    # Short unique code for the ethnicity.
    code = fields.Char(required=True, )
    # Human-readable, translatable name; must be unique.
    name = fields.Char(required=True, translate=True)

    _sql_constraints = [
        ('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
        ('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
    ]
| Add description to ethnicity model | Add description to ethnicity model
| Python | agpl-3.0 | ShaheenHossain/eagle-medical,laslabs/vertical-medical,laslabs/vertical-medical,ShaheenHossain/eagle-medical | ---
+++
@@ -25,7 +25,7 @@
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
-
+ _description = 'Medical Patient Ethnicity'
notes = fields.Char()
code = fields.Char(required=True, )
name = fields.Char(required=True, translate=True) |
10ddda3e230aa72889c81cd69792122b265010fe | rental/views/rental_state_view.py | rental/views/rental_state_view.py | from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
"""
Change the state of a given rental
If given an invalid state, this shows a 403 Forbidden response.
:author: Florian Stamer
"""
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
managed_by_user = rental.depot.managed_by(request.user)
data = request.POST
state = data.get('state')
old_state = data.get('old_state')
# message = data.get('message')
if old_state != rental.state:
return HttpResponseForbidden('The state of the rental request has changed')
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
rental.state = state
rental.save()
return redirect('rental:detail', rental_uuid=rental.uuid)
| from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
class RentalStateView(View):
    """
    Change the state of a given rental

    If given an invalid state, this shows a 403 Forbidden response.

    :author: Florian Stamer
    """

    def check_availability(self, rental):
        """Raise ValidationError if any requested item is over-booked.

        NOTE(review): ``rental.itemrental_set`` is iterated without
        ``.all()``, and ``ValidationError`` is not imported in this
        module — confirm both against the actual codebase.
        """
        availability = Availability(rental.start_date, rental.return_date, rental.depot_id)

        for item_rental in rental.itemrental_set:
            intervals = availability.get_availability_intervals(item_rental.item)
            available = availability.get_minimum_availability(intervals)

            if item_rental.quantity > available:
                raise ValidationError({
                    'quantity': 'The quantity must not exceed the availability '
                                'of the item in the requested time frame.'
                })

    def post(self, request, rental_uuid):
        """Validate and apply a state transition for the given rental."""
        rental = get_object_or_404(Rental, pk=rental_uuid)
        managed_by_user = rental.depot.managed_by(request.user)

        data = request.POST
        state = data.get('state')
        old_state = data.get('old_state')
        # message = data.get('message')

        # Optimistic-concurrency guard: reject stale form submissions.
        if old_state != rental.state:
            return HttpResponseForbidden('The state of the rental request has changed')

        if state not in allowed_transitions(managed_by_user, rental.state):
            return HttpResponseForbidden('Invalid state transition')

        # Approving reserves stock, so re-check availability at this point.
        if state == Rental.STATE_APPROVED:
            self.check_availability(rental)

        rental.state = state
        rental.save()

        return redirect('rental:detail', rental_uuid=rental.uuid)
| Check availability when approving rental request | Check availability when approving rental request
| Python | agpl-3.0 | verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool | ---
+++
@@ -1,6 +1,7 @@
from django.http import HttpResponseForbidden
from django.shortcuts import redirect, get_object_or_404
from django.views import View
+from rental.availability import Availability
from rental.state_transitions import allowed_transitions
from rental.models import Rental
@@ -13,6 +14,19 @@
:author: Florian Stamer
"""
+
+ def check_availability(self, rental):
+ availability = Availability(rental.start_date, rental.return_date, rental.depot_id)
+
+ for item_rental in rental.itemrental_set:
+ intervals = availability.get_availability_intervals(item_rental.item)
+ available = availability.get_minimum_availability(intervals)
+
+ if item_rental.quantity > available:
+ raise ValidationError({
+ 'quantity': 'The quantity must not exceed the availability '
+ 'of the item in the requested time frame.'
+ })
def post(self, request, rental_uuid):
rental = get_object_or_404(Rental, pk=rental_uuid)
@@ -29,6 +43,9 @@
if state not in allowed_transitions(managed_by_user, rental.state):
return HttpResponseForbidden('Invalid state transition')
+ if state == Rental.STATE_APPROVED:
+ self.check_availability(rental)
+
rental.state = state
rental.save()
|
dc5235afec231454594201a54039869da26db576 | enactiveagents/model/perceptionhandler.py | enactiveagents/model/perceptionhandler.py | """
Module that holds classes that represent an agent's perception handler.
"""
import abc
import world
import structure
class PerceptionHandler(object):
@abc.abstractmethod
def perceive(self, agent, world):
"""
Generates a percept given an agent and a world.
:param agent: The agent to generate the percept for.
:param world: The world to generate the percept for.
:return: The percept.
"""
raise NotImplementedError("Should be implemented by child")
class EmptyPerceptionHandler(PerceptionHandler):
def perceive(self, agent, world):
return ""
class BasicPerceptionHandler(PerceptionHandler):
def perceive(self, agent_, world_):
for delta in range(1, 10):
pos = world.Position(agent_.get_position())
pos.add(agent_.get_move_delta(delta))
entities = world_.get_entities_at(pos)
if len(entities) > 0:
if isinstance(entities[0], structure.Wall):
return "w%s" % delta
return "" | """
Module that holds classes that represent an agent's perception handler.
"""
import abc
import world
import structure
class PerceptionHandler(object):
@abc.abstractmethod
def perceive(self, agent, world):
"""
Generates a percept given an agent and a world.
:param agent: The agent to generate the percept for.
:param world: The world to generate the percept for.
:return: The percept.
"""
raise NotImplementedError("Should be implemented by child")
class EmptyPerceptionHandler(PerceptionHandler):
def perceive(self, agent, world):
return ""
class BasicPerceptionHandler(PerceptionHandler):
def perceive(self, agent_, world_):
for delta in range(0, 10):
pos = world.Position(agent_.get_position())
pos.add(agent_.get_move_delta(delta))
entities = world_.get_entities_at(pos)
for entity in entities:
if entity == agent_:
continue
if isinstance(entity, structure.Wall):
return "w%s" % delta
elif isinstance(entity, structure.Block):
return "b%s" % delta
return "" | Add block structure to perception handler. Slightly change perception handler logic. | Add block structure to perception handler. Slightly change perception handler logic.
| Python | mit | Beskhue/enactive-agents,Beskhue/enactive-agents,Beskhue/enactive-agents | ---
+++
@@ -27,13 +27,18 @@
class BasicPerceptionHandler(PerceptionHandler):
def perceive(self, agent_, world_):
- for delta in range(1, 10):
+ for delta in range(0, 10):
pos = world.Position(agent_.get_position())
+
pos.add(agent_.get_move_delta(delta))
entities = world_.get_entities_at(pos)
- if len(entities) > 0:
- if isinstance(entities[0], structure.Wall):
+ for entity in entities:
+ if entity == agent_:
+ continue
+ if isinstance(entity, structure.Wall):
return "w%s" % delta
+ elif isinstance(entity, structure.Block):
+ return "b%s" % delta
return "" |
f6abaa5dba68bf010b847de0c6d37b87e5732eea | github2/commits.py | github2/commits.py | from github2.core import BaseData, GithubCommand, Attribute, DateAttribute
class Commit(BaseData):
message = Attribute("Commit message.")
parents = Attribute("List of parents for this commit.")
url = Attribute("Canonical URL for this commit.")
author = Attribute("Author metadata (dict with name/email.)")
id = Attribute("Commit ID.")
committed_date = DateAttribute("Date committed.", format="commit")
authored_data = DateAttribute("Date authored.", format="commit")
tree = Attribute("Tree SHA for this commit.")
committer = Attribute("Comitter metadata (dict with name/email.)")
added = Attribute("(If present) Datastructure representing what's been "
"added since last commit.")
removed = Attribute("(if present) Datastructure representing what's been "
"removed since last commit.")
modified = Attribute("(If present) Datastructure representing what's "
"been modified since last commit.")
class Commits(GithubCommand):
domain = "commits"
def list(self, project, branch="master", file=None):
return self.get_values("list", project, branch, file,
filter="commits", datatype=Commit)
def show(self, project, sha):
return self.get_value("show", project, sha,
filter="commit", datatype=Commit)
| from github2.core import BaseData, GithubCommand, Attribute, DateAttribute
class Commit(BaseData):
message = Attribute("Commit message.")
parents = Attribute("List of parents for this commit.")
url = Attribute("Canonical URL for this commit.")
author = Attribute("Author metadata (dict with name/email.)")
id = Attribute("Commit ID.")
committed_date = DateAttribute("Date committed.", format="commit")
authored_date = DateAttribute("Date authored.", format="commit")
tree = Attribute("Tree SHA for this commit.")
committer = Attribute("Comitter metadata (dict with name/email.)")
added = Attribute("(If present) Datastructure representing what's been "
"added since last commit.")
removed = Attribute("(if present) Datastructure representing what's been "
"removed since last commit.")
modified = Attribute("(If present) Datastructure representing what's "
"been modified since last commit.")
class Commits(GithubCommand):
domain = "commits"
def list(self, project, branch="master", file=None):
return self.get_values("list", project, branch, file,
filter="commits", datatype=Commit)
def show(self, project, sha):
return self.get_value("show", project, sha,
filter="commit", datatype=Commit)
| Fix typo authored_data -> authored_date | Fix typo authored_data -> authored_date
| Python | bsd-3-clause | ask/python-github2 | ---
+++
@@ -8,7 +8,7 @@
author = Attribute("Author metadata (dict with name/email.)")
id = Attribute("Commit ID.")
committed_date = DateAttribute("Date committed.", format="commit")
- authored_data = DateAttribute("Date authored.", format="commit")
+ authored_date = DateAttribute("Date authored.", format="commit")
tree = Attribute("Tree SHA for this commit.")
committer = Attribute("Comitter metadata (dict with name/email.)")
|
b5a21c39c37c02ea7077ce92596d68e496473af0 | grako/rendering.py | grako/rendering.py | # -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
if template is None:
template = self.template
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
| # -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **kwargs):
fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')})
override = self.render_fields(fields)
if template is None:
if override is not None:
template = override
else:
template = self.template
fields.update(kwargs)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
| Allow override of template through return value of render_fields. | Allow override of template through return value of render_fields.
| Python | bsd-2-clause | frnknglrt/grako,vmuriart/grako | ---
+++
@@ -30,11 +30,17 @@
def render_fields(self, fields):
pass
- def render(self, template=None, **fields):
- fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
- self.render_fields(fields)
+ def render(self, template=None, **kwargs):
+ fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')})
+
+ override = self.render_fields(fields)
if template is None:
- template = self.template
+ if override is not None:
+ template = override
+ else:
+ template = self.template
+
+ fields.update(kwargs)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields) |
17a42110978d2fc38daaf0e09e25da760ccdc339 | adhocracy4/emails/mixins.py | adhocracy4/emails/mixins.py | from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/email_logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
| from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
| Allow svg as email logo attachment | Allow svg as email logo attachment
| Python | agpl-3.0 | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | ---
+++
@@ -11,7 +11,10 @@
"""
def get_attachments(self):
attachments = super().get_attachments()
- filename = finders.find('images/email_logo.png')
+ filename = (
+ finders.find('images/email_logo.png')
+ or finders.find('images/email_logo.svg')
+ )
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read()) |
9e7c7c17f17553d010bf61b4d8014b5f0c064aca | examples/freesolv/generate_experiments.py | examples/freesolv/generate_experiments.py | import yaml
import copy
vacuum_switching_lengths = [0, 100, 500, 1000, 5000, 10000]
solvent_switching_lengths = [500, 1000, 5000, 10000, 20000, 50000]
use_sterics = [True, False]
geometry_divisions = [90, 180, 360, 720]
# Load in template yaml:
with open("rj_hydration.yaml", "r") as templatefile:
template_yaml = yaml.load(templatefile)
# Set up vacuum simulations:
for phase in ['vacuum', 'explicit']:
if phase == "vacuum":
switching_lengths = vacuum_switching_lengths
else:
switching_lengths = solvent_switching_lengths
for switching_length in switching_lengths:
for sterics in use_sterics:
for geometry_division in geometry_divisions:
yaml_dict = copy.deepcopy(template_yaml)
specification_file_prefix = "{}_{}ncmc_{}sterics_{}geometry".format(phase, switching_length, sterics, geometry_division)
specification_filename = specification_file_prefix + ".yaml"
yaml_dict['geometry_divisions'][phase] = geometry_division
yaml_dict['use_sterics']['vacuum'] = sterics
yaml_dict['ncmc_switching_times'][phase] = switching_length
yaml_dict['phase'] = phase
yaml_dict['output_filename'] = specification_file_prefix + ".nc"
with open(specification_filename, 'w') as yam_outfile:
yaml.dump(yaml_dict, yam_outfile)
| Add script to generate the yaml files for experiments | Add script to generate the yaml files for experiments
| Python | mit | choderalab/perses,choderalab/perses | ---
+++
@@ -0,0 +1,35 @@
+import yaml
+import copy
+
+vacuum_switching_lengths = [0, 100, 500, 1000, 5000, 10000]
+solvent_switching_lengths = [500, 1000, 5000, 10000, 20000, 50000]
+
+use_sterics = [True, False]
+geometry_divisions = [90, 180, 360, 720]
+
+# Load in template yaml:
+with open("rj_hydration.yaml", "r") as templatefile:
+ template_yaml = yaml.load(templatefile)
+
+
+# Set up vacuum simulations:
+for phase in ['vacuum', 'explicit']:
+ if phase == "vacuum":
+ switching_lengths = vacuum_switching_lengths
+ else:
+ switching_lengths = solvent_switching_lengths
+
+ for switching_length in switching_lengths:
+ for sterics in use_sterics:
+ for geometry_division in geometry_divisions:
+ yaml_dict = copy.deepcopy(template_yaml)
+ specification_file_prefix = "{}_{}ncmc_{}sterics_{}geometry".format(phase, switching_length, sterics, geometry_division)
+ specification_filename = specification_file_prefix + ".yaml"
+ yaml_dict['geometry_divisions'][phase] = geometry_division
+ yaml_dict['use_sterics']['vacuum'] = sterics
+ yaml_dict['ncmc_switching_times'][phase] = switching_length
+ yaml_dict['phase'] = phase
+ yaml_dict['output_filename'] = specification_file_prefix + ".nc"
+
+ with open(specification_filename, 'w') as yam_outfile:
+ yaml.dump(yaml_dict, yam_outfile) | |
9ce7ee71b5eddc0ceff578b45c1324f8eb09ffe1 | artbot_scraper/pipelines.py | artbot_scraper/pipelines.py | # -*- coding: utf-8 -*-
from django.db import IntegrityError
from scrapy.exceptions import DropItem
from titlecase import titlecase
from dateutil import parser, relativedelta
class EventPipeline(object):
def process_item(self, item, spider):
item['titleRaw'] = item['title']
item['title'] = titlecase(item['title'])
if (item['end'] < item['start']):
item['end'] = item['end'] + relativedelta.relativedelta(years =+ 1)
try:
item.save()
except IntegrityError:
raise DropItem('Duplicate: ' + item['venue'] + ' - ' + item['title'])
return item
| # -*- coding: utf-8 -*-
from django.db import IntegrityError
from scrapy.exceptions import DropItem
from titlecase import titlecase
from dateutil import parser, relativedelta
class EventPipeline(object):
def process_item(self, item, spider):
item['titleRaw'] = item['title']
item['title'] = titlecase(item['title'])
if 'end' in item and 'start' in item:
if (item['end'] < item['start']):
item['end'] = item['end'] + relativedelta.relativedelta(years =+ 1)
try:
item.save()
except IntegrityError:
raise DropItem('Duplicate: ' + item['venue'] + ' - ' + item['title'])
return item
| Verify that event end and start keys exist before accessing. | Verify that event end and start keys exist before accessing.
| Python | mit | coreymcdermott/artbot,coreymcdermott/artbot | ---
+++
@@ -10,8 +10,9 @@
item['titleRaw'] = item['title']
item['title'] = titlecase(item['title'])
- if (item['end'] < item['start']):
- item['end'] = item['end'] + relativedelta.relativedelta(years =+ 1)
+ if 'end' in item and 'start' in item:
+ if (item['end'] < item['start']):
+ item['end'] = item['end'] + relativedelta.relativedelta(years =+ 1)
try:
item.save() |
e2a4262035e0d99c83a1bef8fdd594745a66b011 | tests/app/views/test_application.py | tests/app/views/test_application.py | from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
| from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'trackPageview'
in res.get_data(as_text=True))
| Test for presence of analytics in minified JS | Test for presence of analytics in minified JS
Minifying the JS shortens variable names wherever their scope makes it possible.
This means that `GOVUK.analytics.trackPageView` gets shortened to something like
`e.t.trackPageView`, and not in a predicatable way. Because the `trackPageView`
method could be used by indeterminable other scripts the compiler can't minify
it, so testing for just the presence of its name is safe.
| Python | mit | AusDTO/dto-digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend | ---
+++
@@ -10,5 +10,5 @@
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
- 'GOVUK.analytics.trackPageview'
+ 'trackPageview'
in res.get_data(as_text=True)) |
4dce72f60d5575212448a7432eecc118bfd3c845 | apps/reactions/permissions.py | apps/reactions/permissions.py | from rest_framework import permissions
# TODO Add write permission for 1%CREW / Assitants.
class IsAuthorOrReadOnly(permissions.BasePermission):
"""
Custom permission to only allow author of an object to edit it.
"""
def has_permission(self, request, view, obj=None):
# Skip the check unless this is an object-level test.
if obj is None:
return True
# Read permissions are allowed to any request.
if request.method in permissions.SAFE_METHODS:
return True
# Write permissions are only allowed to the author of the reaction.
return obj.author == request.user
| from rest_framework import permissions
# TODO Add write permission for 1%CREW / Assistants.
class IsAuthorOrReadOnly(permissions.BasePermission):
"""
Custom permission to only allow author of an object to edit it.
"""
def has_permission(self, request, view, obj=None):
# Skip the check unless this is an object-level test.
if obj is None:
return True
# Read permissions are allowed to any request.
if request.method in permissions.SAFE_METHODS:
return True
# Write permissions are only allowed to the author of the reaction.
return obj.author == request.user
| Fix spelling mistake in comment. | Fix spelling mistake in comment.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site | ---
+++
@@ -1,6 +1,6 @@
from rest_framework import permissions
-# TODO Add write permission for 1%CREW / Assitants.
+# TODO Add write permission for 1%CREW / Assistants.
class IsAuthorOrReadOnly(permissions.BasePermission):
"""
Custom permission to only allow author of an object to edit it. |
50f2acfcfe482c5452a80243b186ec411f672afc | boundaryservice/urls.py | boundaryservice/urls.py | from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-sets/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-sets/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundaries/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundaries/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| Use plural names for resource types in URLs | Use plural names for resource types in URLs
| Python | mit | datamade/represent-boundaries,opencorato/represent-boundaries,opencorato/represent-boundaries,datamade/represent-boundaries,datamade/represent-boundaries,opencorato/represent-boundaries | ---
+++
@@ -3,12 +3,12 @@
from boundaryservice.views import *
urlpatterns = patterns('',
- url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
- url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
- url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
- url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
+ url(r'^boundary-sets/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
+ url(r'^boundary-sets/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
+ url(r'^boundaries/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
+ url(r'^boundaries/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
) |
1631eb5d1e009c236bdc3db1d2d44da9e9e9102a | kokki/cookbooks/busket/recipes/default.py | kokki/cookbooks/busket/recipes/default.py |
import os
from kokki import *
Package("erlang")
Package("mercurial",
provider = "kokki.providers.package.easy_install.EasyInstallProvider")
Script("install-busket",
not_if = lambda:os.path.exists(env.config.busket.path),
cwd = "/usr/local/src",
code = (
"git clone git://github.com/samuel/busket.git busket\n"
"cd busket\n"
"make release\n"
"mv rel/busket {install_path}\n"
).format(install_path=env.config.busket.path)
)
|
import os
from kokki import *
Package("erlang")
# ubuntu's erlang is a bit messed up.. remove the man link
File("/usr/lib/erlang/man",
action = "delete")
Package("mercurial",
provider = "kokki.providers.package.easy_install.EasyInstallProvider")
Script("install-busket",
not_if = lambda:os.path.exists(env.config.busket.path),
cwd = "/usr/local/src",
code = (
"git clone git://github.com/samuel/busket.git busket\n"
"cd busket\n"
"make release\n"
"mv rel/busket {install_path}\n"
).format(install_path=env.config.busket.path)
)
| Remove man link for erlang in ubuntu | Remove man link for erlang in ubuntu
| Python | bsd-3-clause | samuel/kokki | ---
+++
@@ -3,6 +3,9 @@
from kokki import *
Package("erlang")
+# ubuntu's erlang is a bit messed up.. remove the man link
+File("/usr/lib/erlang/man",
+ action = "delete")
Package("mercurial",
provider = "kokki.providers.package.easy_install.EasyInstallProvider") |
963f9ed01b400cd95e14aecdee7c265fe48a4d41 | mopidy_nad/__init__.py | mopidy_nad/__init__.py | import os
import pkg_resources
from mopidy import config, ext
__version__ = pkg_resources.get_distribution("Mopidy-NAD").version
class Extension(ext.Extension):
dist_name = "Mopidy-NAD"
ext_name = "nad"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["port"] = config.String()
schema["source"] = config.String(optional=True)
schema["speakers-a"] = config.Boolean(optional=True)
schema["speakers-b"] = config.Boolean(optional=True)
return schema
def setup(self, registry):
from mopidy_nad.mixer import NadMixer
registry.add("mixer", NadMixer)
| import pathlib
import pkg_resources
from mopidy import config, ext
__version__ = pkg_resources.get_distribution("Mopidy-NAD").version
class Extension(ext.Extension):
dist_name = "Mopidy-NAD"
ext_name = "nad"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["port"] = config.String()
schema["source"] = config.String(optional=True)
schema["speakers-a"] = config.Boolean(optional=True)
schema["speakers-b"] = config.Boolean(optional=True)
return schema
def setup(self, registry):
from mopidy_nad.mixer import NadMixer
registry.add("mixer", NadMixer)
| Use pathlib to read ext.conf | Use pathlib to read ext.conf
| Python | apache-2.0 | mopidy/mopidy-nad | ---
+++
@@ -1,4 +1,4 @@
-import os
+import pathlib
import pkg_resources
@@ -13,8 +13,7 @@
version = __version__
def get_default_config(self):
- conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
- return config.read(conf_file)
+ return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema() |
334e13b39945dab3e2c03752f32af0de5e382e9d | base/consensus.py | base/consensus.py | from abc import ABCMeta, abstractmethod
class Consensus(metaclass=ABCMeta):
"""
An interface for defining a consensus protocol.
The 'propose' and 'decide' methods need to be defined
"""
@abstractmethod
def propose(self, message):
#raise NotImplementedError("Method 'propose' needs to be implemented")
pass
@abstractmethod
def decide(self):
#raise NotImplementedError("Method 'decide' needs to be implemented")
pass | Add abstract Consensus protocol class | Add abstract Consensus protocol class
| Python | mit | koevskinikola/ByzantineRandomizedConsensus | ---
+++
@@ -0,0 +1,17 @@
+from abc import ABCMeta, abstractmethod
+
+class Consensus(metaclass=ABCMeta):
+ """
+ An interface for defining a consensus protocol.
+ The 'propose' and 'decide' methods need to be defined
+ """
+
+ @abstractmethod
+ def propose(self, message):
+ #raise NotImplementedError("Method 'propose' needs to be implemented")
+ pass
+
+ @abstractmethod
+ def decide(self):
+ #raise NotImplementedError("Method 'decide' needs to be implemented")
+ pass | |
cd9a32c9c6ff2adc9e85fe471c30cf555b8871b0 | tests/abm/test_pops.py | tests/abm/test_pops.py | # -*- coding: utf-8 -*-
"""
test_pops
~~~~~~~~~
tests for population code
"""
from abm import pops
from abm.entities import Task
import pytest
from scipy.stats.distributions import uniform
import numpy as np
@pytest.fixture
def basicenv():
return pops.Environment()
@pytest.mark.unit
def test_distribute_awards(basicenv):
class MockEntity(object):
total_award = 0
def award(self, amount):
self.total_award += amount
basicenv.population = []
for i in range(6):
basicenv.population.append(MockEntity())
basicenv.path = [1, 2, 3, 2, 4]
task = Task(4)
basicenv._distribute_awards(task)
observed = [
x.total_award for x in basicenv.population
]
expected = ([0] + [.25] * 3 + [0, 0])
assert observed == expected
assert np.isclose(sum(observed), .75)
basicenv.path = [0, 4]
basicenv._distribute_awards(task)
assert basicenv.population[0].total_award == 1.
basicenv.path = [5] * (basicenv.path_cutoff + 1)
basicenv._distribute_awards(task)
assert basicenv.population[5].total_award == -.05
| # -*- coding: utf-8 -*-
"""
test_pops
~~~~~~~~~
tests for population code
"""
from abm import pops
from abm.entities import Task
import pytest
from scipy.stats.distributions import uniform
import numpy as np
@pytest.fixture
def basicenv():
return pops.Environment()
@pytest.mark.unit
def test_distribute_awards(basicenv):
class MockEntity(object):
total_award = 0
def award(self, amount):
self.total_award += amount
basicenv.population = []
for i in range(6):
basicenv.population.append(MockEntity())
basicenv.path = [1, 2, 3, 2, 4]
task = Task(4)
basicenv._distribute_awards(task)
observed = [
x.total_award for x in basicenv.population
]
expected = ([0] + [.25] * 3 + [0, 0])
assert observed == expected
assert np.isclose(sum(observed), .75)
basicenv.path = [0, 4]
basicenv._distribute_awards(task)
assert basicenv.population[0].total_award == 1.
basicenv.path = [5] * (basicenv.path_cutoff + 1)
basicenv._distribute_awards(task)
assert basicenv.population[5].total_award == -.05
| Add newline at the end of the file for my sanity. | Add newline at the end of the file for my sanity.
| Python | mit | bhtucker/agents | ---
+++
@@ -48,4 +48,3 @@
basicenv.path = [5] * (basicenv.path_cutoff + 1)
basicenv._distribute_awards(task)
assert basicenv.population[5].total_award == -.05
- |
d3130c380c4b0621bbb9d9a990df91850bccb16b | comics/comics/crfh.py | comics/comics/crfh.py | from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Colleges Roomies from Hell"
language = "en"
url = "http://www.crfh.net/"
start_date = "1999-01-01"
rights = "Maritza Campos"
class Crawler(CrawlerBase):
history_capable_date = "1999-01-01"
time_zone = "America/Merida"
def crawl(self, pub_date):
page_url = "http://www.crfh.net/d2/%s.html" % (
pub_date.strftime("%Y%m%d"),
)
page = self.parse_page(page_url)
url = page.src('img[src*="crfh%s"]' % pub_date.strftime("%Y%m%d"))
url = url.replace("\n", "")
return CrawlerImage(url)
| from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Colleges Roomies from Hell"
language = "en"
url = "http://www.crfh.net/"
start_date = "1999-01-01"
rights = "Maritza Campos"
class Crawler(CrawlerBase):
history_capable_date = "1999-01-01"
time_zone = "America/Merida"
def crawl(self, pub_date):
page_url = "http://www.crfh.net/d/%s.html" % (
pub_date.strftime("%Y%m%d"),
)
page = self.parse_page(page_url)
url = page.src('img[src*="crfh%s"]' % pub_date.strftime("%Y%m%d"))
url = url.replace("\n", "")
return CrawlerImage(url)
| Update "Colleges Roomies from Hell" after site change | Update "Colleges Roomies from Hell" after site change
| Python | agpl-3.0 | datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics | ---
+++
@@ -15,7 +15,7 @@
time_zone = "America/Merida"
def crawl(self, pub_date):
- page_url = "http://www.crfh.net/d2/%s.html" % (
+ page_url = "http://www.crfh.net/d/%s.html" % (
pub_date.strftime("%Y%m%d"),
)
page = self.parse_page(page_url) |
33f7e94385a8d4fbba797fc81b2565906604c9a4 | src/zeit/content/cp/browser/area.py | src/zeit/content/cp/browser/area.py | # Copyright (c) 2009-2010 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.cp.browser.blocks.teaser
import zeit.content.cp.interfaces
import zeit.edit.browser.block
import zeit.edit.browser.view
import zope.formlib.form
class ViewletManager(zeit.edit.browser.block.BlockViewletManager):
@property
def css_class(self):
classes = super(ViewletManager, self).css_class
return ' '.join(['editable-area', classes])
class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout):
interface = zeit.content.cp.interfaces.IArea
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
form_fields = zope.formlib.form.Fields() # XXX implement me
class EditCommon(zeit.edit.browser.view.EditBox):
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IArea).select(
'supertitle', 'teaserText', 'background_color')
form_fields['background_color'].custom_widget = (
zeit.cms.browser.widget.ColorpickerWidget)
| # Copyright (c) 2009-2010 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.cp.browser.blocks.teaser
import zeit.content.cp.interfaces
import zeit.edit.browser.block
import zeit.edit.browser.view
import zope.formlib.form
class ViewletManager(zeit.edit.browser.block.BlockViewletManager):
@property
def css_class(self):
classes = super(ViewletManager, self).css_class
return ' '.join(['editable-area', classes])
class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout):
interface = zeit.content.cp.interfaces.IArea
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
class EditCommon(zeit.edit.browser.view.EditBox):
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IArea).select(
'supertitle', 'teaserText', 'background_color')
form_fields['background_color'].custom_widget = (
zeit.cms.browser.widget.ColorpickerWidget)
| Remove field that has now the same default implementation on it's super class. | Remove field that has now the same default implementation on it's super class.
| Python | bsd-3-clause | ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp | ---
+++
@@ -22,8 +22,6 @@
layout_prefix = 'teaserbar' # XXX should be area
layouts = ()
- form_fields = zope.formlib.form.Fields() # XXX implement me
-
class EditCommon(zeit.edit.browser.view.EditBox):
|
a802501943757dd85ce66a11fcd7ae40c0239462 | datastructures.py | datastructures.py | #!/usr/bin/env python3
class Triangle:
"""A class structure for storing and minipulating a triangle.
The trianlge is represented as a 3-tuple of points. Each point is
represented as a 2-tuple of floats, the first element being the
x-coordinate and the second element being the y-coordinate.
Several useful operations can be applied to a triangle such as, rotate,
translate, split across altitude, and rectanglify.
The Triangle (and underlying tuple) should be treated as an immutable
data structure. All methods return a new triangle and do not modify the
existing one."""
def __init__(self, tpl):
"""tpl is a 3-tuple of coordinates"""
self.points = tpl
class Shape:
"""A class structure for representing and minipulating arbitary shapes.
A shape is defines as a list of triangles (see Triangle). Several
operations can be applied to a shape such as rotation, translation and
splitting the shape into two.
This object should be treated as an immutable data structure. All methods
return new shapes and do not modify the existing one."""
def __init__(self, triangle_list):
"""triangle_list is a list of triangles"""
self.triangles = triangle_list
| #!/usr/bin/env python3
import math
class Triangle:
"""A class structure for storing and minipulating a triangle.
The trianlge is represented as a 3-tuple of points. Each point is
represented as a 2-tuple of floats, the first element being the
x-coordinate and the second element being the y-coordinate.
Several useful operations can be applied to a triangle such as, rotate,
translate, split across altitude, and rectanglify.
The Triangle (and underlying tuple) should be treated as an immutable
data structure. All methods return a new triangle and do not modify the
existing one."""
def __init__(self, tpl):
"""tpl is a 3-tuple of coordinates"""
self.points = tpl
def rotate(self, pivot, rangle):
"""Return a new triangle rotate clockwise (by angle) around pivot.
pivot -- A coordinate pair
rangle -- The angle to rotate by in radians"""
new_points = list()
px, py = pivot
for x, y in self.points:
dx, dy = x - px, y - py
current_angle = math.atan2(dy, dx)
total_angle = current_angle + rangle
r = math.hypot(dx, dy)
nx = r*math.cos(total_angle) + px
ny = r*math.sin(total_angle) + py
new_points.append((nx, ny))
return Triangle(tuple(new_points))
class Shape:
"""A class structure for representing and minipulating arbitary shapes.
A shape is defines as a list of triangles (see Triangle). Several
operations can be applied to a shape such as rotation, translation and
splitting the shape into two.
This object should be treated as an immutable data structure. All methods
return new shapes and do not modify the existing one."""
def __init__(self, triangle_list):
"""triangle_list is a list of triangles"""
self.triangles = triangle_list
| Add method to rotate triangle | Add method to rotate triangle
| Python | mit | moyamo/polygon2square | ---
+++
@@ -1,4 +1,6 @@
#!/usr/bin/env python3
+
+import math
class Triangle:
"""A class structure for storing and minipulating a triangle.
@@ -17,7 +19,24 @@
def __init__(self, tpl):
"""tpl is a 3-tuple of coordinates"""
self.points = tpl
+
+ def rotate(self, pivot, rangle):
+ """Return a new triangle rotate clockwise (by angle) around pivot.
+ pivot -- A coordinate pair
+ rangle -- The angle to rotate by in radians"""
+ new_points = list()
+ px, py = pivot
+ for x, y in self.points:
+ dx, dy = x - px, y - py
+ current_angle = math.atan2(dy, dx)
+ total_angle = current_angle + rangle
+ r = math.hypot(dx, dy)
+ nx = r*math.cos(total_angle) + px
+ ny = r*math.sin(total_angle) + py
+ new_points.append((nx, ny))
+ return Triangle(tuple(new_points))
+
class Shape:
"""A class structure for representing and minipulating arbitary shapes. |
dfcb61ef1187f9d3cf80ffc55ad8aceafb0b29b3 | djoauth2/helpers.py | djoauth2/helpers.py | # coding: utf-8
import random
import urlparse
from string import ascii_letters, digits
from urllib import urlencode
# From http://tools.ietf.org/html/rfc6750#section-2.1
BEARER_TOKEN_CHARSET = ascii_letters + digits + '-._~+/'
def random_hash(length):
return ''.join(random.sample(BEARER_TOKEN_CHARSET, length))
def random_hash_generator(length):
return lambda: random_hash(length)
def update_parameters(url, parameters):
""" Updates a URL's existing GET parameters.
@url: a URL string.
@parameters: a dictionary of parameters, {string:string}.
"""
parsed_url = urlparse(url)
query_parameters = urlparse.parse_qsl(parsed_url.query)
parsed_url.query = urlencode(query_parameters + parameters.items())
return urlparse.urlunparse(parsed_url)
| # coding: utf-8
import random
import urlparse
from string import ascii_letters, digits
from urllib import urlencode
# From http://tools.ietf.org/html/rfc6750#section-2.1
BEARER_TOKEN_CHARSET = ascii_letters + digits + '-._~+/'
def random_hash(length):
return ''.join(random.sample(BEARER_TOKEN_CHARSET, length))
def random_hash_generator(length):
return lambda: random_hash(length)
def update_parameters(url, parameters):
""" Updates a URL's existing GET parameters.
@url: a URL string.
@parameters: a dictionary of parameters, {string:string}.
"""
parsed_url = urlparse.urlparse(url)
existing_query_parameters = urlparse.parse_qsl(parsed_url.query)
# Read http://docs.python.org/2/library/urlparse.html#urlparse.urlparse
# if this is confusing.
return urlparse.urlunparse((
parsed_url.scheme,
parsed_url.netloc,
parsed_url.path,
parsed_url.params,
urlencode(existing_query_parameters + parameters.items()),
parsed_url.fragment
))
| Fix query string update helper. | Fix query string update helper.
| Python | mit | seler/djoauth2,vden/djoauth2-ng,Locu/djoauth2,Locu/djoauth2,seler/djoauth2,vden/djoauth2-ng | ---
+++
@@ -22,10 +22,16 @@
@url: a URL string.
@parameters: a dictionary of parameters, {string:string}.
"""
- parsed_url = urlparse(url)
+ parsed_url = urlparse.urlparse(url)
+ existing_query_parameters = urlparse.parse_qsl(parsed_url.query)
+ # Read http://docs.python.org/2/library/urlparse.html#urlparse.urlparse
+ # if this is confusing.
+ return urlparse.urlunparse((
+ parsed_url.scheme,
+ parsed_url.netloc,
+ parsed_url.path,
+ parsed_url.params,
+ urlencode(existing_query_parameters + parameters.items()),
+ parsed_url.fragment
+ ))
- query_parameters = urlparse.parse_qsl(parsed_url.query)
- parsed_url.query = urlencode(query_parameters + parameters.items())
-
- return urlparse.urlunparse(parsed_url)
- |
986901c9e91d44758200fb8d3264b88c0977be37 | lvsr/configs/timit_bothgru_hybrid2.py | lvsr/configs/timit_bothgru_hybrid2.py | Config(
net=Config(attention_type='hybrid2',
shift_predictor_dims=[100],
max_left=10,
max_right=100),
initialization=[
("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"),
("/recognizer/generator/att_trans/hybrid_att/loc_att",
"weights_init", "IsotropicGaussian(0.01)"),
("/recognizer/generator/att_trans/hybrid_att/loc_att",
"biases_init", "IsotropicGaussian(5.0)")],
data=Config(normalization="norm.pkl"))
| Config(
net=Config(dec_transition='GatedRecurrent',
enc_transition='GatedRecurrent',
attention_type='hybrid2',
shift_predictor_dims=[100],
max_left=10,
max_right=100),
initialization=[
("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"),
("/recognizer/generator/att_trans/hybrid_att/loc_att",
"weights_init", "IsotropicGaussian(0.001)"),
("/recognizer/generator/att_trans/hybrid_att/loc_att",
"biases_init", "IsotropicGaussian(5.0)")],
data=Config(normalization="norm.pkl"))
| Fix hybrid2, but it is still no use | Fix hybrid2, but it is still no use
| Python | mit | nke001/attention-lvcsr,rizar/attention-lvcsr,rizar/attention-lvcsr,nke001/attention-lvcsr,nke001/attention-lvcsr,rizar/attention-lvcsr,nke001/attention-lvcsr,rizar/attention-lvcsr,rizar/attention-lvcsr,nke001/attention-lvcsr | ---
+++
@@ -1,12 +1,14 @@
Config(
- net=Config(attention_type='hybrid2',
+ net=Config(dec_transition='GatedRecurrent',
+ enc_transition='GatedRecurrent',
+ attention_type='hybrid2',
shift_predictor_dims=[100],
max_left=10,
max_right=100),
initialization=[
("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"),
("/recognizer/generator/att_trans/hybrid_att/loc_att",
- "weights_init", "IsotropicGaussian(0.01)"),
+ "weights_init", "IsotropicGaussian(0.001)"),
("/recognizer/generator/att_trans/hybrid_att/loc_att",
"biases_init", "IsotropicGaussian(5.0)")],
data=Config(normalization="norm.pkl")) |
d48946c89b4436fad97fdee65e34d7ca77f58d95 | modules/base.py | modules/base.py | #-*- coding: utf-8 -*-
import pandas as pd
import pandas_datareader.data as web
import datetime
import config
import os
import re
import pickle
def get_file_path(code):
return os.path.join(config.DATA_PATH, 'data', code + '.pkl')
def download(code, year1, month1, day1, year2, month2, day2):
start = datetime.datetime(year1, month1, day1)
end = datetime.datetime(year2, month2, day2)
df = web.DataReader('%s.KS' % code, 'yahoo', start, end)
save(code, df)
return df
def load(code):
try:
return pd.read_pickle(code)
except:
pass
return None
def save(code, df):
df.to_pickle(code)
def dump(code, df):
with open(get_file_path(code), 'wb') as handle:
pickle.dump(df, handle)
| #-*- coding: utf-8 -*-
import pandas as pd
import pandas_datareader.data as web
import datetime
import config
import os
import re
import pickle
def get_file_path(code):
if not os.path.exists(config.DATA_PATH):
try:
os.makedirs(config.DATA_PATH)
except:
pass
return os.path.join(config.DATA_PATH, 'data', code + '.pkl')
def download(code, year1, month1, day1, year2, month2, day2):
start = datetime.datetime(year1, month1, day1)
end = datetime.datetime(year2, month2, day2)
df = web.DataReader('%s.KS' % code, 'yahoo', start, end)
save(code, df)
return df
def load(code):
try:
return pd.read_pickle(code)
except:
pass
return None
def save(code, df):
df.to_pickle(code)
def dump(code, df):
with open(get_file_path(code), 'wb') as handle:
pickle.dump(df, handle)
| Fix the FileNotFoundError when data director is not exist | Fix the FileNotFoundError when data director is not exist
| Python | mit | jongha/stock-ai,jongha/stock-ai,jongha/stock-ai,jongha/stock-ai | ---
+++
@@ -8,6 +8,12 @@
import pickle
def get_file_path(code):
+ if not os.path.exists(config.DATA_PATH):
+ try:
+ os.makedirs(config.DATA_PATH)
+ except:
+ pass
+
return os.path.join(config.DATA_PATH, 'data', code + '.pkl')
def download(code, year1, month1, day1, year2, month2, day2): |
785c154cb97dcf8bbdc9c3ad5d4da6049bf7155c | web_blog.py | web_blog.py | # -*- coding: utf-8 -*-
from flask import Flask
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR(127) NOT NULL,
text VARCHAR(10000) NOT NULL,
created TIMESTAMP NOT NULL,
)
"""
app = FLask(__name__)
@app.route('/')
def hello():
return u'Hello World!'
if __name__ == '__main__':
app.run(debug=True)
| # -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR(127) NOT NULL,
text VARCHAR(10000) NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=web_blog user=store'
)
def connect_db():
"""Return a connection to the database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""Initialize the database
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@app.route('/')
def hello():
return u'Hello World!'
if __name__ == '__main__':
app.run(debug=True)
| Add functionality to connect to DB server and initialize our blog's database | Add functionality to connect to DB server and initialize our blog's database
| Python | mit | charlieRode/web_blog | ---
+++
@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
from flask import Flask
+import os
+import psycopg2
+from contextlib import closing
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
@@ -7,11 +10,29 @@
id serial PRIMARY KEY,
title VARCHAR(127) NOT NULL,
text VARCHAR(10000) NOT NULL,
- created TIMESTAMP NOT NULL,
+ created TIMESTAMP NOT NULL
)
"""
-app = FLask(__name__)
+app = Flask(__name__)
+
+app.config['DATABASE'] = os.environ.get(
+ 'DATABASE_URL', 'dbname=web_blog user=store'
+)
+
+def connect_db():
+ """Return a connection to the database"""
+ return psycopg2.connect(app.config['DATABASE'])
+
+
+def init_db():
+ """Initialize the database
+ WARNING: executing this function will drop existing tables.
+ """
+ with closing(connect_db()) as db:
+ db.cursor().execute(DB_SCHEMA)
+ db.commit()
+
@app.route('/')
def hello(): |
ae5c29e06ce110de1c44ffc4c466a4c611007d22 | spyder_unittest/widgets/tests/__init__.py | spyder_unittest/widgets/tests/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright © 2017 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# noqa: D104
| Add copyright notice, skip docstring checks | Add copyright notice, skip docstring checks | Python | mit | jitseniesen/spyder-unittest | ---
+++
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright © 2017 Spyder Project Contributors
+# Licensed under the terms of the MIT License
+# (see LICENSE.txt for details)
+
+# noqa: D104 | |
9492441a3587f7257d6295ebcb93a3e20c16a1d2 | opensrs/models.py | opensrs/models.py | from dateutil.parser import parse
class Domain(object):
def __init__(self, data):
self.name = data['name']
self.auto_renew = (data['f_auto_renew'] == 'Y')
self.expiry_date = parse(data['expiredate']).date()
@property
def tld(self):
return self.name.rsplit('.', 1)[1]
| from dateutil.parser import parse
class Domain(object):
def __init__(self, data):
self.name = data['name']
self.auto_renew = (data['f_auto_renew'] == 'Y')
self.expiry_date = parse(data['expiredate']).date()
@property
def tld(self):
return self.name.split('.')[-1]
| Use more concise way to get tld | Use more concise way to get tld
| Python | mit | yola/opensrs,yola/opensrs | ---
+++
@@ -9,4 +9,4 @@
@property
def tld(self):
- return self.name.rsplit('.', 1)[1]
+ return self.name.split('.')[-1] |
c1bafcaa2c826ab450bd7a5e77a48fd742098e19 | trex/serializers.py | trex/serializers.py | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project")
| # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
| Add EntryTagsSerializer for returning tags of an Entry | Add EntryTagsSerializer for returning tags of an Entry
| Python | mit | bjoernricks/trex,bjoernricks/trex | ---
+++
@@ -9,7 +9,7 @@
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
-from trex.models.project import Project, Entry
+from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
@@ -28,6 +28,13 @@
fields = ("id", "name", "description", "active", "created", "entries")
+class EntryTagsSerializer(HyperlinkedModelSerializer):
+
+ class Meta:
+ model = Tag
+ fields = ("url", "id", "name")
+
+
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
@@ -37,9 +44,10 @@
class EntryDetailSerializer(HyperlinkedModelSerializer):
+ tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
- "user_abbr", "user", "created", "project")
+ "user_abbr", "user", "created", "project", "tags") |
1add854d3855dade0d6eea9a2740e8233d02cf6b | amitgroup/features/edge_descriptor.py | amitgroup/features/edge_descriptor.py |
from binary_descriptor import BinaryDescriptor
import amitgroup as ag
import amitgroup.features
# TODO: This is temporarily moved
#@BinaryDescriptor.register('edges')
class EdgeDescriptor(BinaryDescriptor):
"""
Binary descriptor based on edges.
The parameters are similar to :func:`amitgroup.features.bedges`.
Parameters
----------
polarity_sensitive : bool
If True, the polarity of the edges will matter. If False, then the direction of edges will not matter.
k : int
See :func:`amitgroup.features.bedges`.
radius : int
Radius of edge spreading. See :func:`amitgroup.features.bedges`.
min_contrast : float
See :func:`amitgroup.features.bedges`.
"""
def __init__(self, polarity_sensitive=True, k=5, radius=1, min_contrast=0.1):
self.settings = {}
# Change this
self.settings['contrast_insensitive'] = polarity_sensitive
self.settings['k'] = k
self.settings['radius'] = radius
self.settings['min_contrast'] = min_contrast
self.settings.update(settings)
def extract_features(self, img):
#return ag.features.bedges_from_image(img, **self.settings)
return ag.features.bedges(img, **self.settings)
def save_to_dict(self):
return self.settings
@classmethod
def load_from_dict(cls, d):
return cls(d)
EdgeDescriptor = BinaryDescriptor.register('edges')(EdgeDescriptor)
|
from binary_descriptor import BinaryDescriptor
import amitgroup as ag
import amitgroup.features
# TODO: This is temporarily moved
#@BinaryDescriptor.register('edges')
class EdgeDescriptor(BinaryDescriptor):
"""
Binary descriptor based on edges.
The parameters are similar to :func:`amitgroup.features.bedges`.
Parameters
----------
polarity_sensitive : bool
If True, the polarity of the edges will matter. If False, then the direction of edges will not matter.
k : int
See :func:`amitgroup.features.bedges`.
radius : int
Radius of edge spreading. See :func:`amitgroup.features.bedges`.
min_contrast : float
See :func:`amitgroup.features.bedges`.
"""
def __init__(self, polarity_sensitive=True, k=5, radius=1, min_contrast=0.1):
self.settings = {}
# Change this
self.settings['contrast_insensitive'] = not polarity_sensitive
self.settings['k'] = k
self.settings['radius'] = radius
self.settings['min_contrast'] = min_contrast
self.settings.update(settings)
def extract_features(self, img):
#return ag.features.bedges_from_image(img, **self.settings)
return ag.features.bedges(img, **self.settings)
def save_to_dict(self):
return self.settings
@classmethod
def load_from_dict(cls, d):
return cls(d)
EdgeDescriptor = BinaryDescriptor.register('edges')(EdgeDescriptor)
| Fix bug in EdgeDescriptor setting. | Fix bug in EdgeDescriptor setting.
| Python | bsd-3-clause | amitgroup/amitgroup | ---
+++
@@ -25,7 +25,7 @@
def __init__(self, polarity_sensitive=True, k=5, radius=1, min_contrast=0.1):
self.settings = {}
# Change this
- self.settings['contrast_insensitive'] = polarity_sensitive
+ self.settings['contrast_insensitive'] = not polarity_sensitive
self.settings['k'] = k
self.settings['radius'] = radius
self.settings['min_contrast'] = min_contrast |
a2589c5203c90b3b8b5cc504da36708038e0eb58 | links/maker/urls.py | links/maker/urls.py | from django.conf.urls import patterns, url
from maker.views import (RegsitrationView,
AuthenticationView,
MakerSelfView,
MakerProfileView,
ResetPasswordRequestView,
ResetPasswordProcessView,
ChangePasswordView,
EmailChangeRequestView,
EmailChangeProcessView)
urlpatterns = patterns(
'',
url(
r'^/?$',
MakerSelfView.as_view(),
name='maker-self'
),
url(
r'^register/?$',
RegsitrationView.as_view(),
name='registration'
),
url(
r'^authenticate/?$',
AuthenticationView.as_view(),
name='authentication'
),
url(
r'^password/?$',
ChangePasswordView.as_view(),
name='change-password'
),
url(
r'^password/reset/?$',
ResetPasswordRequestView.as_view(),
name='password-reset'
),
url(
r'^password/reset/update?$',
ResetPasswordProcessView.as_view(),
name='password-reset-process'
),
url(
r'^email/?$',
EmailChangeRequestView.as_view(),
name='email-change-request'
),
url(
r'^email/update?$',
EmailChangeProcessView.as_view(),
name='email-change-process'
),
url(
r'^(?P<pk>[0-9]+)/?$',
MakerProfileView.as_view(),
name='maker-profile-view'
)
)
| from django.conf.urls import patterns, url
from maker.views import (RegsitrationView,
AuthenticationView,
MakerSelfView,
MakerProfileView,
ResetPasswordRequestView,
ResetPasswordProcessView,
ChangePasswordView,
EmailChangeRequestView,
EmailChangeProcessView)
urlpatterns = patterns(
'',
url(
r'^self/?$',
MakerSelfView.as_view(),
name='maker-self'
),
url(
r'^register/?$',
RegsitrationView.as_view(),
name='registration'
),
url(
r'^authenticate/?$',
AuthenticationView.as_view(),
name='authentication'
),
url(
r'^password/?$',
ChangePasswordView.as_view(),
name='change-password'
),
url(
r'^password/reset/?$',
ResetPasswordRequestView.as_view(),
name='password-reset'
),
url(
r'^password/reset/update?$',
ResetPasswordProcessView.as_view(),
name='password-reset-process'
),
url(
r'^email/?$',
EmailChangeRequestView.as_view(),
name='email-change-request'
),
url(
r'^email/update?$',
EmailChangeProcessView.as_view(),
name='email-change-process'
),
url(
r'^(?P<pk>[0-9]+)/?$',
MakerProfileView.as_view(),
name='maker-profile-view'
)
)
| Change maker self URL name | Change maker self URL name
| Python | mit | projectweekend/Links-API,projectweekend/Links-API | ---
+++
@@ -14,7 +14,7 @@
urlpatterns = patterns(
'',
url(
- r'^/?$',
+ r'^self/?$',
MakerSelfView.as_view(),
name='maker-self'
), |
332ed6c26830bf2ac8e154948c4c58b745d5b5ae | cosmo_tester/test_suites/snapshots/conftest.py | cosmo_tester/test_suites/snapshots/conftest.py | import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=4,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
hosts.instances[3] = get_image('centos', test_config)
win_vm = hosts.instances[2]
win_vm.prepare_for_windows('windows_2012')
lin_vm = hosts.instances[3]
lin_vm.image_name = test_config.platform['centos_7_image']
lin_vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
| import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=4,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
hosts.instances[3] = get_image('centos', test_config)
win_vm = hosts.instances[2]
win_vm.prepare_for_windows('windows_2012')
lin_vm = hosts.instances[3]
lin_vm.image_name = test_config.platform['centos_7_image']
lin_vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
if request.param in ['5.0.5', '5.1.0']:
old_mgr = hosts.instances[0]
old_mgr.wait_for_manager()
old_mgr.run_command('mv /etc/cloudify/ssl/rabbitmq{_,-}cert.pem',
use_sudo=True)
old_mgr.run_command('mv /etc/cloudify/ssl/rabbitmq{_,-}key.pem',
use_sudo=True)
old_mgr.run_command(
'chown rabbitmq. /etc/cloudify/ssl/rabbitmq-*', use_sudo=True)
old_mgr.run_command('systemctl restart cloudify-rabbitmq',
use_sudo=True)
yield hosts
finally:
hosts.destroy()
| Use correct rabbit certs on old IP setter | Use correct rabbit certs on old IP setter
| Python | apache-2.0 | cloudify-cosmo/cloudify-system-tests,cloudify-cosmo/cloudify-system-tests | ---
+++
@@ -27,6 +27,17 @@
hosts.create()
try:
+ if request.param in ['5.0.5', '5.1.0']:
+ old_mgr = hosts.instances[0]
+ old_mgr.wait_for_manager()
+ old_mgr.run_command('mv /etc/cloudify/ssl/rabbitmq{_,-}cert.pem',
+ use_sudo=True)
+ old_mgr.run_command('mv /etc/cloudify/ssl/rabbitmq{_,-}key.pem',
+ use_sudo=True)
+ old_mgr.run_command(
+ 'chown rabbitmq. /etc/cloudify/ssl/rabbitmq-*', use_sudo=True)
+ old_mgr.run_command('systemctl restart cloudify-rabbitmq',
+ use_sudo=True)
yield hosts
finally:
hosts.destroy() |
428ff018ccda3862446ebaadf61db1a03470c18f | tests/mltils/test_infrequent_value_encoder.py | tests/mltils/test_infrequent_value_encoder.py | # pylint: disable=missing-docstring, invalid-name, import-error
import pandas as pd
from mltils.encoders import InfrequentValueEncoder
def test_infrequent_value_encoder_1():
ive = InfrequentValueEncoder()
assert ive is not None
def test_infrequent_value_encoder_2():
df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
encoded = ive.fit_transform(df)
expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
assert expected.equals(encoded)
| Add unit tests for InfrequentValueEncoder | Add unit tests for InfrequentValueEncoder
| Python | mit | rladeira/mltils | ---
+++
@@ -0,0 +1,17 @@
+# pylint: disable=missing-docstring, invalid-name, import-error
+import pandas as pd
+
+from mltils.encoders import InfrequentValueEncoder
+
+
+def test_infrequent_value_encoder_1():
+ ive = InfrequentValueEncoder()
+ assert ive is not None
+
+
+def test_infrequent_value_encoder_2():
+ df = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'c']})
+ ive = InfrequentValueEncoder(thrshld=1, str_rpl='ifq')
+ encoded = ive.fit_transform(df)
+ expected = pd.DataFrame({'A': ['a', 'a', 'b', 'b', 'ifq']})
+ assert expected.equals(encoded) | |
0fd7b771823b97cb5fb7789c981d4ab3befcd28e | bluebottle/homepage/models.py | bluebottle/homepage/models.py | from bluebottle.quotes.models import Quote
from bluebottle.slides.models import Slide
from bluebottle.statistics.models import Statistic
from bluebottle.projects.models import Project
class HomePage(object):
"""
Instead of serving all the objects separately we combine
Slide, Quote and Stats into a dummy object
"""
def get(self, language):
self.id = language
self.quotes = Quote.objects.published().filter(language=language)
self.slides = Slide.objects.published().filter(language=language)
self.statistics = Statistic.objects.filter(active=True, language=language).all()
projects = Project.objects.filter(is_campaign=True, status__viewable=True)
if language == 'en':
projects = projects.filter(language__code=language)
projects = projects.order_by('?')
if len(projects) > 4:
self.projects = projects[0:4]
elif len(projects) > 0:
self.projects = projects[0:len(projects)]
else:
self.projects = None
return self
| from bluebottle.quotes.models import Quote
from bluebottle.slides.models import Slide
from bluebottle.statistics.models import Statistic
from bluebottle.projects.models import Project
class HomePage(object):
"""
Instead of serving all the objects separately we combine
Slide, Quote and Stats into a dummy object
"""
def get(self, language):
self.id = language
self.quotes = Quote.objects.published().filter(language=language)
self.slides = Slide.objects.published().filter(language=language)
self.statistics = Statistic.objects.filter(active=True, language=language).all()
projects = Project.objects.filter(is_campaign=True, status__viewable=True)
if language == 'en':
projects = projects.filter(language__code=language)
projects = projects.order_by('?')
if len(projects) > 4:
self.projects = projects[0:4]
elif len(projects) > 0:
self.projects = projects[0:len(projects)]
else:
self.projects = Project.objects.none()
return self
| Send an empty list instead of None if no projects | Send an empty list instead of None if no projects
selected for homepage.
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | ---
+++
@@ -26,6 +26,6 @@
elif len(projects) > 0:
self.projects = projects[0:len(projects)]
else:
- self.projects = None
+ self.projects = Project.objects.none()
return self |
b46dc26e5e1b4c0388c330017dc52393417c3323 | tests/test_init.py | tests/test_init.py | from disco.test import TestCase, TestJob
class InitJob(TestJob):
sort = False
@staticmethod
def map_reader(stream, size, url, params):
params.x = 10
return (stream, size, url)
@staticmethod
def map_init(iter, params):
assert hasattr(params, 'x')
iter.next()
params['x'] += 100
@staticmethod
def map(e, params):
yield e, int(e) + params['x']
@staticmethod
def reduce_init(iter, params):
params['y'] = 1000
@staticmethod
def reduce(iter, params):
for k, v in iter:
yield k, int(v) + params['y']
class InitTestCase(TestCase):
def serve(self, path):
return 'skipthis\n' + ('%s\n' % path) * 10
def runTest(self):
self.job = InitJob().run(input=self.test_server.urls(range(10)))
results = list(self.results(self.job))
for k, v in results:
self.assertEquals(int(k) + 1110, int(v))
self.assertEquals(len(results), 100)
| from disco.test import TestCase, TestJob
class InitJob(TestJob):
params = {'x': 10}
sort = False
@staticmethod
def map_init(iter, params):
iter.next()
params['x'] += 100
@staticmethod
def map(e, params):
yield e, int(e) + params['x']
@staticmethod
def reduce_init(iter, params):
params['y'] = 1000
@staticmethod
def reduce(iter, params):
for k, v in iter:
yield k, int(v) + params['y']
class InitTestCase(TestCase):
def serve(self, path):
return 'skipthis\n' + ('%s\n' % path) * 10
def runTest(self):
self.job = InitJob().run(input=self.test_server.urls(range(10)))
results = list(self.results(self.job))
for k, v in results:
self.assertEquals(int(k) + 1110, int(v))
self.assertEquals(len(results), 100)
| Revert "added a test for the map_reader before map_init -case which fails currently" (deprecate init functions instead) | Revert "added a test for the map_reader before map_init -case which fails currently"
(deprecate init functions instead)
This reverts commit 88551bf444b7b358fea8e7eb4475df2c5d87ceeb.
| Python | bsd-3-clause | ErikDubbelboer/disco,pombredanne/disco,mwilliams3/disco,simudream/disco,pombredanne/disco,simudream/disco,mozilla/disco,beni55/disco,ErikDubbelboer/disco,pavlobaron/disco_playground,pooya/disco,ktkt2009/disco,scrapinghub/disco,seabirdzh/disco,pombredanne/disco,pooya/disco,pombredanne/disco,mwilliams3/disco,ktkt2009/disco,oldmantaiter/disco,simudream/disco,discoproject/disco,seabirdzh/disco,discoproject/disco,mwilliams3/disco,pavlobaron/disco_playground,oldmantaiter/disco,discoproject/disco,simudream/disco,beni55/disco,mozilla/disco,scrapinghub/disco,oldmantaiter/disco,pombredanne/disco,mwilliams3/disco,beni55/disco,mozilla/disco,scrapinghub/disco,ktkt2009/disco,scrapinghub/disco,seabirdzh/disco,discoproject/disco,seabirdzh/disco,beni55/disco,ErikDubbelboer/disco,mozilla/disco,ErikDubbelboer/disco,mwilliams3/disco,pooya/disco,seabirdzh/disco,pavlobaron/disco_playground,pavlobaron/disco_playground,ktkt2009/disco,ktkt2009/disco,discoproject/disco,simudream/disco,pooya/disco,beni55/disco,oldmantaiter/disco,oldmantaiter/disco,ErikDubbelboer/disco | ---
+++
@@ -1,16 +1,11 @@
from disco.test import TestCase, TestJob
class InitJob(TestJob):
+ params = {'x': 10}
sort = False
@staticmethod
- def map_reader(stream, size, url, params):
- params.x = 10
- return (stream, size, url)
-
- @staticmethod
def map_init(iter, params):
- assert hasattr(params, 'x')
iter.next()
params['x'] += 100
|
fefa46e21724fcd87cda0fa58101e1a74a31adec | molly/apps/places/importers/naptan.py | molly/apps/places/importers/naptan.py | from datetime import timedelta
import httplib
from tempfile import TemporaryFile
from zipfile import ZipFile
from celery.schedules import schedule
from molly.apps.places.parsers.naptan import NaptanParser
class NaptanImporter(object):
IMPORTER_NAME = 'naptan'
IMPORT_SCHEDULE = schedule(run_every=timedelta(weeks=1))
HTTP_HOST = "www.dft.gov.uk"
REMOTE_PATH = "/NaPTAN/snapshot/NaPTANxml.zip"
def __init__(self, config):
self._http_connection = httplib.HTTPConnection(self.HTTP_HOST)
self._url = "http://%s%s" % (self.HTTP_HOST, self.REMOTE_PATH)
def _get_file_from_url(self):
temporary = TemporaryFile()
self._http_connection.request('GET', self._url)
temporary.write(self._http_connection.getresponse().read())
return ZipFile(temporary).open('NaPTAN.xml')
def load(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
self.stop_service.insert_and_merge(stop)
Provider = NaptanImporter
| from datetime import timedelta
import httplib
from tempfile import TemporaryFile
from zipfile import ZipFile
from celery.schedules import schedule
from molly.apps.places.parsers.naptan import NaptanParser
class NaptanImporter(object):
IMPORTER_NAME = 'naptan'
IMPORT_SCHEDULE = schedule(run_every=timedelta(weeks=1))
HTTP_HOST = "www.dft.gov.uk"
REMOTE_PATH = "/NaPTAN/snapshot/NaPTANxml.zip"
def __init__(self, config):
self._http_connection = httplib.HTTPConnection(self.HTTP_HOST)
self._url = "http://%s%s" % (self.HTTP_HOST, self.REMOTE_PATH)
def _get_file_from_url(self):
temporary = TemporaryFile()
self._http_connection.request('GET', self._url)
temporary.write(self._http_connection.getresponse().read())
return ZipFile(temporary).open('NaPTAN.xml')
def load(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
self.poi_service.add_or_update(stop)
Provider = NaptanImporter
| Update the importer to use the places service | Update the importer to use the places service
| Python | apache-2.0 | ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next | ---
+++
@@ -28,7 +28,7 @@
def load(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
- self.stop_service.insert_and_merge(stop)
+ self.poi_service.add_or_update(stop)
Provider = NaptanImporter |
ff0da634e1fa0f8b190a3ba2cac3a03f7df75f91 | memegen/test/test_routes__common.py | memegen/test/test_routes__common.py | # pylint: disable=unused-variable
from unittest.mock import patch, Mock
from memegen.app import create_app
from memegen.settings import get_config
from memegen.routes._common import display
def describe_display():
app = create_app(get_config('test'))
app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid'
request_html = Mock()
request_html.headers.get = Mock(return_value="text/html")
request_html.path = "it's a path"
@patch('memegen.routes._common.request', request_html)
def it_returns_html_for_browsers():
with app.test_request_context():
html = display("my_title", "my_path", raw=True)
print(html)
assert "<title>my_title</title>" in html
assert 'url("it\'s a path")' in html
assert "ga('create', 'my_tid', 'auto');" in html
| # pylint: disable=unused-variable,expression-not-assigned
from unittest.mock import patch, call, Mock
import pytest
from expecter import expect
from memegen.app import create_app
from memegen.settings import get_config
from memegen.routes._common import display
def describe_display():
@pytest.fixture
def app():
app = create_app(get_config('test'))
app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid'
return app
request_html = Mock(path="it's a path")
request_html.headers.get = Mock(return_value="text/html")
request_image = Mock(path="it's a path")
request_image.headers.get = Mock(return_value="(not a browser)")
@patch('memegen.routes._common.request', request_html)
def it_returns_html_for_browsers(app):
with app.test_request_context():
html = display("my_title", "my_path", raw=True)
print(html)
assert "<title>my_title</title>" in html
assert 'url("it\'s a path")' in html
assert "ga('create', 'my_tid', 'auto');" in html
@patch('memegen.routes._common._track')
@patch('memegen.routes._common.send_file')
@patch('memegen.routes._common.request', request_image)
def it_returns_an_image_otherwise(mock_send_file, mock_track):
display("my_title", "my_path")
expect(mock_track.mock_calls) == [
call("my_title"),
]
expect(mock_send_file.mock_calls) == [
call("my_path", mimetype='image/jpeg'),
]
| Test that a request defaults to sending an image | Test that a request defaults to sending an image
| Python | mit | joshfriend/memegen,joshfriend/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,joshfriend/memegen,joshfriend/memegen | ---
+++
@@ -1,6 +1,9 @@
-# pylint: disable=unused-variable
+# pylint: disable=unused-variable,expression-not-assigned
-from unittest.mock import patch, Mock
+from unittest.mock import patch, call, Mock
+
+import pytest
+from expecter import expect
from memegen.app import create_app
from memegen.settings import get_config
@@ -9,15 +12,20 @@
def describe_display():
- app = create_app(get_config('test'))
- app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid'
+ @pytest.fixture
+ def app():
+ app = create_app(get_config('test'))
+ app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid'
+ return app
- request_html = Mock()
+ request_html = Mock(path="it's a path")
request_html.headers.get = Mock(return_value="text/html")
- request_html.path = "it's a path"
+
+ request_image = Mock(path="it's a path")
+ request_image.headers.get = Mock(return_value="(not a browser)")
@patch('memegen.routes._common.request', request_html)
- def it_returns_html_for_browsers():
+ def it_returns_html_for_browsers(app):
with app.test_request_context():
html = display("my_title", "my_path", raw=True)
@@ -26,3 +34,17 @@
assert "<title>my_title</title>" in html
assert 'url("it\'s a path")' in html
assert "ga('create', 'my_tid', 'auto');" in html
+
+ @patch('memegen.routes._common._track')
+ @patch('memegen.routes._common.send_file')
+ @patch('memegen.routes._common.request', request_image)
+ def it_returns_an_image_otherwise(mock_send_file, mock_track):
+
+ display("my_title", "my_path")
+
+ expect(mock_track.mock_calls) == [
+ call("my_title"),
+ ]
+ expect(mock_send_file.mock_calls) == [
+ call("my_path", mimetype='image/jpeg'),
+ ] |
ccf60e9e79b8b2db8cbf7918caf23314e8790134 | lib/reporter.py | lib/reporter.py | #!/usr/bin/python
import sys
import os
name = sys.argv[1]
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
print("<%s>" % name)
print("\t<status=\"%s\" />" % status)
if status != "SKIP":
print("\t<outcome>")
for line in sys.stdin:
# Escaping, ... !
print(line.rstrip(os.linesep))
print("\t</outcome>")
print("</%s>" % name)
| #!/usr/bin/python
import sys
import os
name = sys.argv[1]
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
print("<%s status=\"%s\">" % (name, status))
print("\t<outcome>")
for line in sys.stdin:
# Escaping, ... !
print(line.rstrip(os.linesep))
print("\t</outcome>")
print("</%s>" % name)
| Fix the XML format produced | Fix the XML format produced
| Python | apache-2.0 | CESNET/secant,CESNET/secant | ---
+++
@@ -8,14 +8,12 @@
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
-print("<%s>" % name)
-print("\t<status=\"%s\" />" % status)
+print("<%s status=\"%s\">" % (name, status))
-if status != "SKIP":
- print("\t<outcome>")
- for line in sys.stdin:
- # Escaping, ... !
- print(line.rstrip(os.linesep))
- print("\t</outcome>")
+print("\t<outcome>")
+for line in sys.stdin:
+ # Escaping, ... !
+ print(line.rstrip(os.linesep))
+print("\t</outcome>")
print("</%s>" % name) |
d2bac1fe8dc6d90d0d680a97aec0646ad9674bae | qrl/core/ntp.py | qrl/core/ntp.py | # coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import sys
from time import time
from ntplib import NTPClient
from qrl.core import logger
ntp_server = 'pool.ntp.org'
version = 3
times = 5
drift = None
def get_ntp_response():
try:
ntp_client = NTPClient()
response = ntp_client.request(ntp_server, version=version)
except Exception as e:
logger.exception(e)
sys.exit(0)
return response
def getNTP():
ntp_timestamp = 0
response = get_ntp_response()
if response:
ntp_timestamp = int(response.tx_time)
return ntp_timestamp
def setDrift():
global drift
response = get_ntp_response()
if not response:
return response
drift = response.offset
def getTime():
global drift
curr_time = drift + int(time())
return curr_time
| # coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import sys
from time import time
from ntplib import NTPClient
from qrl.core import logger
ntp_servers = ['pool.ntp.org', 'ntp.ubuntu.com']
NTP_VERSION = 3
NTP_RETRIES = 6
drift = None
def get_ntp_response():
for retry in range(NTP_RETRIES):
ntp_server = ntp_servers[retry % len(ntp_servers)]
try:
ntp_client = NTPClient()
response = ntp_client.request(ntp_server, version=NTP_VERSION)
except Exception as e:
logger.warning(e)
continue
return response
# FIXME: Provide some proper clean before exiting
logger.fatal("Could not contact NTP servers after %d retries", NTP_RETRIES)
sys.exit(-1)
def getNTP():
ntp_timestamp = 0
response = get_ntp_response()
if response:
ntp_timestamp = int(response.tx_time)
return ntp_timestamp
def setDrift():
global drift
response = get_ntp_response()
if not response:
return response
drift = response.offset
def getTime():
global drift
curr_time = drift + int(time())
return curr_time
| Support multiple servers and retries | Support multiple servers and retries
| Python | mit | jleni/QRL,elliottdehn/QRL,randomshinichi/QRL,elliottdehn/QRL,theQRL/QRL,theQRL/QRL,cyyber/QRL,randomshinichi/QRL,jleni/QRL,cyyber/QRL,elliottdehn/QRL,elliottdehn/QRL | ---
+++
@@ -9,20 +9,26 @@
from qrl.core import logger
-ntp_server = 'pool.ntp.org'
-version = 3
-times = 5
+ntp_servers = ['pool.ntp.org', 'ntp.ubuntu.com']
+NTP_VERSION = 3
+NTP_RETRIES = 6
drift = None
def get_ntp_response():
- try:
- ntp_client = NTPClient()
- response = ntp_client.request(ntp_server, version=version)
- except Exception as e:
- logger.exception(e)
- sys.exit(0)
- return response
+ for retry in range(NTP_RETRIES):
+ ntp_server = ntp_servers[retry % len(ntp_servers)]
+ try:
+ ntp_client = NTPClient()
+ response = ntp_client.request(ntp_server, version=NTP_VERSION)
+ except Exception as e:
+ logger.warning(e)
+ continue
+ return response
+
+ # FIXME: Provide some proper clean before exiting
+ logger.fatal("Could not contact NTP servers after %d retries", NTP_RETRIES)
+ sys.exit(-1)
def getNTP(): |
83ed5ca9bc388dbe9b2d82510842a99b3a2e5ce7 | src/personalisation/middleware.py | src/personalisation/middleware.py | from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
| from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
| Create empty 'segments' object in session if none exists | Create empty 'segments' object in session if none exists
| Python | mit | LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation | ---
+++
@@ -21,6 +21,9 @@
response = self.get_response(request)
+ if not request.session.get('segments'):
+ request.session['segments'] = []
+
print(request.session['segments'])
return response |
c0e98c14813c966ecd9e6b47395cb336a244f090 | discussion/forms.py | discussion/forms.py | from django import forms
from discussion.models import Comment, Post, Discussion
from notification.models import NoticeSetting
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
widgets = {
'body': forms.Textarea(attrs={'placeholder': 'Reply to this conversation'}),
}
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
widgets = {
'body': forms.Textarea(attrs={'placeholder': 'Start a conversation'}),
}
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(
required=False,
queryset=Discussion.objects,
empty_label='All discussions')
class SubscribeForm(forms.Form):
send = forms.ModelMultipleChoiceField(
NoticeSetting.objects,
required=False,
label=u'Notify me with updates from this discussion by',
widget=forms.CheckboxSelectMultiple)
def __init__(self, *args, **kwargs):
qs = kwargs.pop('qs')
super(SubscribeForm, self).__init__(*args, **kwargs)
self.fields['send'].queryset = qs
| from django import forms
from django.utils.translation import ugettext_lazy as _
from discussion.models import Comment, Post, Discussion
from notification.models import NoticeSetting
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
widgets = {
'body': forms.Textarea(attrs={'placeholder': _('Reply to this conversation')}),
}
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
widgets = {
'body': forms.Textarea(attrs={'placeholder': _('Start a conversation')}),
}
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(
required=False,
queryset=Discussion.objects,
empty_label=_('All discussions'))
class SubscribeForm(forms.Form):
send = forms.ModelMultipleChoiceField(
NoticeSetting.objects,
required=False,
label=_('Notify me with updates from this discussion by'),
widget=forms.CheckboxSelectMultiple)
def __init__(self, *args, **kwargs):
qs = kwargs.pop('qs')
super(SubscribeForm, self).__init__(*args, **kwargs)
self.fields['send'].queryset = qs
| Make the odd string translatable. | Make the odd string translatable.
| Python | bsd-2-clause | incuna/django-discussion,lehins/lehins-discussion,lehins/lehins-discussion,incuna/django-discussion,lehins/lehins-discussion | ---
+++
@@ -1,4 +1,5 @@
from django import forms
+from django.utils.translation import ugettext_lazy as _
from discussion.models import Comment, Post, Discussion
from notification.models import NoticeSetting
@@ -9,7 +10,7 @@
exclude = ('user', 'post')
model = Comment
widgets = {
- 'body': forms.Textarea(attrs={'placeholder': 'Reply to this conversation'}),
+ 'body': forms.Textarea(attrs={'placeholder': _('Reply to this conversation')}),
}
@@ -18,7 +19,7 @@
exclude = ('user', 'discussion')
model = Post
widgets = {
- 'body': forms.Textarea(attrs={'placeholder': 'Start a conversation'}),
+ 'body': forms.Textarea(attrs={'placeholder': _('Start a conversation')}),
}
@@ -27,14 +28,14 @@
discussion = forms.ModelChoiceField(
required=False,
queryset=Discussion.objects,
- empty_label='All discussions')
+ empty_label=_('All discussions'))
class SubscribeForm(forms.Form):
send = forms.ModelMultipleChoiceField(
NoticeSetting.objects,
required=False,
- label=u'Notify me with updates from this discussion by',
+ label=_('Notify me with updates from this discussion by'),
widget=forms.CheckboxSelectMultiple)
def __init__(self, *args, **kwargs): |
18e310680f7dfd8f5a5186baf37cab9968f19012 | django_base/urls.py | django_base/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
)
| Remove admin docs by default since they are never used. | Remove admin docs by default since they are never used. | Python | bsd-3-clause | SheepDogInc/django-base,SheepDogInc/django-base | ---
+++
@@ -1,9 +1,8 @@
from django.conf.urls import patterns, include, url
+
from django.contrib import admin
-
admin.autodiscover()
urlpatterns = patterns('',
- url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
) |
d6601b9d7bdbf81d89f3d165f11845384d09797c | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.2.dev0'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.2'
| Update dsub version to 0.4.2 | Update dsub version to 0.4.2
PiperOrigin-RevId: 337172014
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | ---
+++
@@ -26,4 +26,4 @@
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
-DSUB_VERSION = '0.4.2.dev0'
+DSUB_VERSION = '0.4.2' |
24045cd16a862ebd31f4a88a733a05bf2aff03a5 | easygeoip/urls_api.py | easygeoip/urls_api.py | from django.conf.urls import patterns, url
# API URLs
from .views import LocationFromIpView
urlpatterns = patterns('',
url(r'^location/(?P<ip_address>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}))/$', LocationFromIpView.as_view(),
name='geoip-explicit-ip-view'),
url(r'^location/$', LocationFromIpView.as_view(), name='geoip-implicit-ip-view') # Take IP addr from request
)
| from django.conf.urls import url
# API URLs
from .views import LocationFromIpView
urlpatterns = [
url(r'^location/(?P<ip_address>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}))/$', LocationFromIpView.as_view(),
name='geoip-explicit-ip-view'),
url(r'^location/$', LocationFromIpView.as_view(), name='geoip-implicit-ip-view') # Take IP addr from request
]
| Upgrade to new urlpatterns format | Upgrade to new urlpatterns format | Python | mit | lambdacomplete/django-easygeoip | ---
+++
@@ -1,10 +1,10 @@
-from django.conf.urls import patterns, url
+from django.conf.urls import url
# API URLs
from .views import LocationFromIpView
-urlpatterns = patterns('',
+urlpatterns = [
url(r'^location/(?P<ip_address>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}))/$', LocationFromIpView.as_view(),
name='geoip-explicit-ip-view'),
url(r'^location/$', LocationFromIpView.as_view(), name='geoip-implicit-ip-view') # Take IP addr from request
-)
+] |
cb4973909ea662abdf718e5a831806dcb0ecc821 | 14B-088/HI/HI_correct_mask_model.py | 14B-088/HI/HI_correct_mask_model.py |
'''
Swap the spatial axes. Swap the spectral and stokes axes.
'''
import sys
from astropy.io import fits
hdu = fits.open(sys.argv[1], mode='update')
hdu[0].data = hdu[0].data.swapaxes(0, 1)
hdu[0].data = hdu[0].data[:, :, :, ::-1]
hdu[0].data = hdu[0].data[:, :, ::-1, :]
hdu.flush()
execfile("~/Dropbox/code_development/ewky_scripts/header_swap_axis.py")
hdu[0].header = header_swapaxes(hdu[0].header, 2, 3)
hdu.flush()
hdu.close()
|
'''
\Swap the spectral and stokes axes. Needed due to issue in regridding function
'''
import sys
from astropy.io import fits
hdu = fits.open(sys.argv[1], mode='update')
hdu[0].data = hdu[0].data.swapaxes(0, 1)
execfile("/home/eric/Dropbox/code_development/ewky_scripts/header_swap_axis.py")
hdu[0].header = header_swapaxes(hdu[0].header, 2, 3)
hdu.flush()
hdu.close()
| Update what's needed to correct mask and model | Update what's needed to correct mask and model
| Python | mit | e-koch/VLA_Lband,e-koch/VLA_Lband | ---
+++
@@ -1,6 +1,6 @@
'''
-Swap the spatial axes. Swap the spectral and stokes axes.
+\Swap the spectral and stokes axes. Needed due to issue in regridding function
'''
import sys
@@ -11,12 +11,7 @@
hdu[0].data = hdu[0].data.swapaxes(0, 1)
-hdu[0].data = hdu[0].data[:, :, :, ::-1]
-hdu[0].data = hdu[0].data[:, :, ::-1, :]
-
-hdu.flush()
-
-execfile("~/Dropbox/code_development/ewky_scripts/header_swap_axis.py")
+execfile("/home/eric/Dropbox/code_development/ewky_scripts/header_swap_axis.py")
hdu[0].header = header_swapaxes(hdu[0].header, 2, 3)
|
f16add1160e5a76f94be30ea54cea27045c32705 | tests/test_blacklist.py | tests/test_blacklist.py | import unittest
import config
from .. import ntokloapi
class BlacklistTest(unittest.TestCase):
def setUp(self):
self.blacklist = ntokloapi.Blacklist(config.TEST_KEY, config.TEST_SECRET)
def test_blacklist_add_singleitem(self):
response = self.blacklist.add(productid=['10201', ])
assert response == "204"
def test_blacklist_add_multipleitems(self):
response = self.blacklist.add(productid=['10202', '10203'])
assert response == "204"
def test_blacklist_add_empty_elements(self):
response = self.blacklist.add(productid=['10204', '10205', '', ''])
assert response == "204"
def test_blacklist_remove_singleitem(self):
response = self.blacklist.remove(productid=['10201', ])
assert response == "204"
def test_blacklist_remove_multipleitems(self):
response = self.blacklist.remove(productid=['10202', '10203'])
assert response == "204"
def test_blacklist_remove_empty_elements(self):
response = self.blacklist.remove(productid=['10204', '10205', '', ''])
assert response == "204"
def test_blacklist_show_items(self):
response = self.blacklist.list()
assert not response
| import unittest
import config
import ntokloapi
class BlacklistTest(unittest.TestCase):
def setUp(self):
self.blacklist = ntokloapi.Blacklist(config.TEST_KEY, config.TEST_SECRET)
def test_blacklist_add_singleitem(self):
response = self.blacklist.add(productid=['10201', ])
assert response == 204
def test_blacklist_add_multipleitems(self):
response = self.blacklist.add(productid=['10202', '10203'])
assert response == 204
def test_blacklist_add_empty_elements(self):
response = self.blacklist.add(productid=['10204', '10205', '', ''])
assert response == 204
def test_blacklist_remove_singleitem(self):
response = self.blacklist.remove(productid=['10201', ])
assert response == 204
def test_blacklist_remove_multipleitems(self):
response = self.blacklist.remove(productid=['10202', '10203'])
assert response == 204
def test_blacklist_remove_empty_elements(self):
response = self.blacklist.remove(productid=['10204', '10205', '', ''])
assert response == 204
def test_blacklist_show_items(self):
response = self.blacklist.list()
assert not response
| Fix unit tests for the blacklist | Fix unit tests for the blacklist
| Python | apache-2.0 | nToklo/ntokloapi-python | ---
+++
@@ -1,7 +1,7 @@
import unittest
import config
-from .. import ntokloapi
+import ntokloapi
class BlacklistTest(unittest.TestCase):
@@ -12,27 +12,27 @@
def test_blacklist_add_singleitem(self):
response = self.blacklist.add(productid=['10201', ])
- assert response == "204"
+ assert response == 204
def test_blacklist_add_multipleitems(self):
response = self.blacklist.add(productid=['10202', '10203'])
- assert response == "204"
+ assert response == 204
def test_blacklist_add_empty_elements(self):
response = self.blacklist.add(productid=['10204', '10205', '', ''])
- assert response == "204"
+ assert response == 204
def test_blacklist_remove_singleitem(self):
response = self.blacklist.remove(productid=['10201', ])
- assert response == "204"
+ assert response == 204
def test_blacklist_remove_multipleitems(self):
response = self.blacklist.remove(productid=['10202', '10203'])
- assert response == "204"
+ assert response == 204
def test_blacklist_remove_empty_elements(self):
response = self.blacklist.remove(productid=['10204', '10205', '', ''])
- assert response == "204"
+ assert response == 204
def test_blacklist_show_items(self):
response = self.blacklist.list() |
f8f0335a1a790b1ef8163a2be968b29769be80a2 | arim/models.py | arim/models.py | from django.db import models
class Lease(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
| from django.db import models
from ipaddr import IPv4Address
class Lease(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
def __str__(self):
return unicode(self).encode('ascii', 'replace')
def __unicode__(self):
return unicode(IPv4Address(self.ip)) + u' = ' + unicode(self.mac)
def __repr__(self):
return u'<Lease: ' + unicode(self) + u'>'
| Add __str__, __unicode__, and __repr__ | Add __str__, __unicode__, and __repr__
| Python | bsd-3-clause | drkitty/arim,OSU-Net/arim,OSU-Net/arim,drkitty/arim,drkitty/arim,OSU-Net/arim | ---
+++
@@ -1,4 +1,5 @@
from django.db import models
+from ipaddr import IPv4Address
class Lease(models.Model):
@@ -8,3 +9,12 @@
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
+
+ def __str__(self):
+ return unicode(self).encode('ascii', 'replace')
+
+ def __unicode__(self):
+ return unicode(IPv4Address(self.ip)) + u' = ' + unicode(self.mac)
+
+ def __repr__(self):
+ return u'<Lease: ' + unicode(self) + u'>' |
f21ae3ffb99c5b90cb329317b2c6282e4992f6cc | safety/utils.py | safety/utils.py | # -*- coding: utf-8 -*-
import importlib
import re
import warnings
from django.conf import settings
from django.utils.translation import ugettext_lazy as _, ugettext
BROWSERS = (
(re.compile('Chrome'), _('Chrome')),
(re.compile('Safari'), _('Safari')),
(re.compile('Firefox'), _('Firefox')),
(re.compile('Opera'), _('Opera')),
(re.compile('IE'), _('Internet Explorer')),
)
DEVICES = (
(re.compile('Android'), _('Android')),
(re.compile('Linux'), _('Linux')),
(re.compile('iPhone'), _('iPhone')),
(re.compile('iPad'), _('iPad')),
(re.compile('(Mac OS X)'), _('OS X')),
(re.compile('NT 5.1'), _('Windows XP')),
(re.compile('NT 6.0'), _('Windows Vista')),
(re.compile('NT 6.1'), _('Windows 7')),
(re.compile('NT 6.2'), _('Windows 8')),
(re.compile('NT 6.3'), _('Windows 8.1')),
(re.compile('Windows'), _('Windows')),
)
def get_device(user_agent):
"""
Transform a User Agent into a human readable text.
"""
infos = []
for regex, name in BROWSERS:
if regex.search(user_agent):
infos.append('%s' % name)
break
for regex, name in DEVICES:
if regex.search(user_agent):
infos.append('%s' % name)
break
return ', '.join(infos)
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
engine = importlib.import_module(mod)
store = engine.SessionStore()
return store
| # -*- coding: utf-8 -*-
try:
from django.utils.importlib import import_module
except ImportError:
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
engine = import_module(mod)
store = engine.SessionStore()
return store
def get_resolver(request, setting):
module_path = getattr(app_settings, setting)
try:
module, attribute = module_path.rsplit('.', 1)
resolver_module = import_module(module)
resolver = getattr(resolver_module, attribute)
except ImportError:
raise ImproperlyConfigured(
"Please specify a valid %s module. "
"Could not find %s " % (setting, module))
except AttributeError:
raise ImproperlyConfigured(
"Please specify a valid %s "
"function. Could not find %s function in module %s" %
(setting, attribute, module))
return resolver(request)
| Add get_resolver() util and remove get_device() (now use ua-parser). | Add get_resolver() util and remove get_device() (now use ua-parser).
| Python | mit | ulule/django-safety,ulule/django-safety | ---
+++
@@ -1,56 +1,37 @@
# -*- coding: utf-8 -*-
-import importlib
-import re
-import warnings
+try:
+ from django.utils.importlib import import_module
+except ImportError:
+ from importlib import import_module
from django.conf import settings
-from django.utils.translation import ugettext_lazy as _, ugettext
-
-
-BROWSERS = (
- (re.compile('Chrome'), _('Chrome')),
- (re.compile('Safari'), _('Safari')),
- (re.compile('Firefox'), _('Firefox')),
- (re.compile('Opera'), _('Opera')),
- (re.compile('IE'), _('Internet Explorer')),
-)
-
-DEVICES = (
- (re.compile('Android'), _('Android')),
- (re.compile('Linux'), _('Linux')),
- (re.compile('iPhone'), _('iPhone')),
- (re.compile('iPad'), _('iPad')),
- (re.compile('(Mac OS X)'), _('OS X')),
- (re.compile('NT 5.1'), _('Windows XP')),
- (re.compile('NT 6.0'), _('Windows Vista')),
- (re.compile('NT 6.1'), _('Windows 7')),
- (re.compile('NT 6.2'), _('Windows 8')),
- (re.compile('NT 6.3'), _('Windows 8.1')),
- (re.compile('Windows'), _('Windows')),
-)
-
-
-def get_device(user_agent):
- """
- Transform a User Agent into a human readable text.
- """
- infos = []
-
- for regex, name in BROWSERS:
- if regex.search(user_agent):
- infos.append('%s' % name)
- break
-
- for regex, name in DEVICES:
- if regex.search(user_agent):
- infos.append('%s' % name)
- break
-
- return ', '.join(infos)
+from django.core.exceptions import ImproperlyConfigured
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
- engine = importlib.import_module(mod)
+ engine = import_module(mod)
store = engine.SessionStore()
return store
+
+
+def get_resolver(request, setting):
+ module_path = getattr(app_settings, setting)
+
+ try:
+ module, attribute = module_path.rsplit('.', 1)
+ resolver_module = import_module(module)
+ resolver = getattr(resolver_module, attribute)
+
+ except ImportError:
+ raise ImproperlyConfigured(
+ "Please specify a valid %s module. "
+ "Could not find %s " % (setting, module))
+
+ except AttributeError:
+ raise ImproperlyConfigured(
+ "Please specify a valid %s "
+ "function. Could not find %s function in module %s" %
+ (setting, attribute, module))
+
+ return resolver(request) |
eda91552ae26188afbad74115495e44e07827c4d | typ/version.py | typ/version.py | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = '0.9.4'
| # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = '0.9.4pre'
| Add a -vvv mode to log when tests are queued for running. | Add a -vvv mode to log when tests are queued for running.
If one is running a bunch of tests in parallel and something
is not working right, it can be useful to see which tests are
currently executing at the same time. There isn't a great way
to do this in typ, because we don't know when tests are actually
picked up for execution by the child processes, but we can at
least log which tests has been queued for running.
This patch adds a -vvv option (verbose=3). This will make
the test log twice as long, since we will now log lines for
when every test starts and stops, but at least that can help
with debugging.
| Python | apache-2.0 | dpranke/typ | ---
+++
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-VERSION = '0.9.4'
+VERSION = '0.9.4pre' |
8b374d041d97307962cdf562c52b2a72345a4efc | snowman/urls.py | snowman/urls.py | """snowman URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic import RedirectView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('api.router')),
url(r'^.*$', RedirectView.as_view(pattern_name='api-root', permanent=True), name='index')
]
| """snowman URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic import RedirectView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('api.router')),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^.*$', RedirectView.as_view(pattern_name='api-root', permanent=True), name='index')
]
| Add simple login form in the API. | Add simple login form in the API.
This is usefull for developers to explore the Browlable api directly
on the browser.
| Python | mit | johnnywell/snowman | ---
+++
@@ -20,5 +20,6 @@
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('api.router')),
+ url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^.*$', RedirectView.as_view(pattern_name='api-root', permanent=True), name='index')
] |
0d5946f0c61bcb629d8a1bbacf09bcc5719986fc | textRenderer.py | textRenderer.py | import colorsys
from PIL import Image, ImageFont, ImageDraw
class TextRenderer:
def __init__(self, font="./NotoSansCJK-Bold.otf",
font_color=(0, 120, 0), color_bg=False):
self.image = None
# params
self.color_bg = color_bg
self.font_color = font_color
# new image and font
self.font = ImageFont.truetype(font, 30)
return None
def getFrameCount(self):
return 1
def rainbow_bg(c):
# hue, lightness, saturation to rgb
vals = colorsys.hls_to_rgb(round(c / 360.0, 2), 0.05, 1)
return (int(vals[0] * 255), int(vals[1] * 255), int(vals[2] * 255))
def draw_text(self, text_to_send):
x, y = self.font.getsize(text_to_send)
self.im = Image.new("RGBA", (x, y+10), "black")
# Add padding below, because PIL sucks!
self.draw = ImageDraw.Draw(self.im)
self.draw.text(
(0, 0), text_to_send, font=self.font, fill=self.font_color
)
def render(self, msgText):
self.draw_text(' '.join(msgText))
def getImage(self):
return self.image
def get_queue_token(self, msgToken):
queue_token = {}
# TODO: add possible params
self.render(msgToken["text"])
queue_token["image"] = [self.im]
queue_token["frame_count"] = self.getFrameCount()
queue_token["action"] = "scroll"
queue_token["valid"] = True
return queue_token
| from PIL import Image, ImageFont, ImageDraw
class TextRenderer:
def __init__(self, font="./NotoSansCJK-Bold.otf",
font_color=(0, 120, 0), color_bg=False):
# params
self.color_bg = color_bg
self.font_color = font_color
# new image and font
self.font = ImageFont.truetype(font, 30)
def draw_text(self, text_to_send):
x, y = self.font.getsize(text_to_send)
# Add padding below, because PIL sucks!
image = Image.new("RGBA", (x, y+10), "black")
ImageDraw.Draw(image).text(
(0, 0), text_to_send, font=self.font, fill=self.font_color
)
return image
def get_queue_token(self, msgToken):
queue_token = {}
# TODO: add possible params
queue_token["image"] = [self.draw_text(' '.join(msgToken["text"]))]
queue_token["frame_count"] = 1
queue_token["action"] = "scroll"
queue_token["valid"] = True
return queue_token
| Remove all unused code in text renderer. | Remove all unused code in text renderer.
| Python | mit | marqsm/LED-bot,marqsm/LED-bot,marqsm/LED-bot,marqsm/LED-bot | ---
+++
@@ -1,4 +1,3 @@
-import colorsys
from PIL import Image, ImageFont, ImageDraw
@@ -6,47 +5,30 @@
def __init__(self, font="./NotoSansCJK-Bold.otf",
font_color=(0, 120, 0), color_bg=False):
- self.image = None
# params
self.color_bg = color_bg
self.font_color = font_color
-
# new image and font
self.font = ImageFont.truetype(font, 30)
- return None
-
- def getFrameCount(self):
- return 1
-
- def rainbow_bg(c):
- # hue, lightness, saturation to rgb
- vals = colorsys.hls_to_rgb(round(c / 360.0, 2), 0.05, 1)
- return (int(vals[0] * 255), int(vals[1] * 255), int(vals[2] * 255))
def draw_text(self, text_to_send):
x, y = self.font.getsize(text_to_send)
- self.im = Image.new("RGBA", (x, y+10), "black")
# Add padding below, because PIL sucks!
- self.draw = ImageDraw.Draw(self.im)
+ image = Image.new("RGBA", (x, y+10), "black")
- self.draw.text(
+ ImageDraw.Draw(image).text(
(0, 0), text_to_send, font=self.font, fill=self.font_color
)
- def render(self, msgText):
- self.draw_text(' '.join(msgText))
-
- def getImage(self):
- return self.image
+ return image
def get_queue_token(self, msgToken):
queue_token = {}
# TODO: add possible params
- self.render(msgToken["text"])
- queue_token["image"] = [self.im]
- queue_token["frame_count"] = self.getFrameCount()
+ queue_token["image"] = [self.draw_text(' '.join(msgToken["text"]))]
+ queue_token["frame_count"] = 1
queue_token["action"] = "scroll"
queue_token["valid"] = True
|
edf38ad11631ad5e793eb9ac95dbc865595d517b | glue_vispy_viewers/common/layer_state.py | glue_vispy_viewers/common/layer_state.py | from __future__ import absolute_import, division, print_function
from glue.external.echo import CallbackProperty, keep_in_sync
from glue.core.state_objects import State
__all__ = ['VispyLayerState']
class VispyLayerState(State):
"""
A base state object for all Vispy layers
"""
layer = CallbackProperty()
visible = CallbackProperty(True)
zorder = CallbackProperty(0)
color = CallbackProperty()
alpha = CallbackProperty()
def __init__(self, **kwargs):
super(VispyLayerState, self).__init__(**kwargs)
self._sync_color = None
self._sync_alpha = None
self.add_callback('layer', self._layer_changed)
self._layer_changed()
def _layer_changed(self):
if self._sync_color is not None:
self._sync_color.stop_syncing()
if self._sync_alpha is not None:
self._sync_alpha.stop_syncing()
if self.layer is not None:
self.color = self.layer.style.color
self.alpha = self.layer.style.alpha
self._sync_color = keep_in_sync(self, 'color', self.layer.style, 'color')
self._sync_alpha = keep_in_sync(self, 'alpha', self.layer.style, 'alpha')
| from __future__ import absolute_import, division, print_function
from glue.external.echo import CallbackProperty, keep_in_sync
from glue.core.state_objects import State
from glue.core.message import LayerArtistUpdatedMessage
__all__ = ['VispyLayerState']
class VispyLayerState(State):
"""
A base state object for all Vispy layers
"""
layer = CallbackProperty()
visible = CallbackProperty(True)
zorder = CallbackProperty(0)
color = CallbackProperty()
alpha = CallbackProperty()
def __init__(self, **kwargs):
super(VispyLayerState, self).__init__(**kwargs)
self._sync_color = None
self._sync_alpha = None
self.add_callback('layer', self._layer_changed)
self._layer_changed()
self.add_global_callback(self._notify_layer_update)
def _notify_layer_update(self, **kwargs):
message = LayerArtistUpdatedMessage(self)
if self.layer is not None and self.layer.hub is not None:
self.layer.hub.broadcast(message)
def _layer_changed(self):
if self._sync_color is not None:
self._sync_color.stop_syncing()
if self._sync_alpha is not None:
self._sync_alpha.stop_syncing()
if self.layer is not None:
self.color = self.layer.style.color
self.alpha = self.layer.style.alpha
self._sync_color = keep_in_sync(self, 'color', self.layer.style, 'color')
self._sync_alpha = keep_in_sync(self, 'alpha', self.layer.style, 'alpha')
| Make sure layer artist icon updates when changing the color mode or colormaps | Make sure layer artist icon updates when changing the color mode or colormaps | Python | bsd-2-clause | glue-viz/glue-vispy-viewers,PennyQ/astro-vispy,astrofrog/glue-3d-viewer,glue-viz/glue-3d-viewer,astrofrog/glue-vispy-viewers | ---
+++
@@ -2,6 +2,7 @@
from glue.external.echo import CallbackProperty, keep_in_sync
from glue.core.state_objects import State
+from glue.core.message import LayerArtistUpdatedMessage
__all__ = ['VispyLayerState']
@@ -27,6 +28,13 @@
self.add_callback('layer', self._layer_changed)
self._layer_changed()
+ self.add_global_callback(self._notify_layer_update)
+
+ def _notify_layer_update(self, **kwargs):
+ message = LayerArtistUpdatedMessage(self)
+ if self.layer is not None and self.layer.hub is not None:
+ self.layer.hub.broadcast(message)
+
def _layer_changed(self):
if self._sync_color is not None: |
818d6584164f04001bf0e75f62c526284521ce69 | demae/dest/s3_dest.py | demae/dest/s3_dest.py | import pandas as pd
import gzip
import boto3
import re
def default_key_map(key):
return re.sub('_input', '_output', key)
class S3Dest():
def __init__(self, key_map=default_key_map):
self.key_map = key_map
def skip_keys(self, bucket, source_prefix):
s3 = boto3.resource('s3')
objs = s3.Bucket(bucket).objects.filter(Prefix=self.key_map(source_prefix))
return [obj.key for obj in objs]
def put(self, data, obj):
body = self.generate_output_file(data)
dest_key = self.key_map(obj.key)
s3 = boto3.resource('s3')
s3.Object(obj.bucket_name, dest_key).put(Body=body)
def generate_output_file(self, data):
df = pd.DataFrame(data)
tsv = df.to_csv(sep='\t', header=False, index=False)
return gzip.compress(tsv.encode())
| import pandas as pd
import gzip
import boto3
import re
import io
def default_key_map(key):
return re.sub('_input', '_output', key)
class S3Dest():
def __init__(self, key_map=default_key_map):
self.key_map = key_map
def skip_keys(self, bucket, source_prefix):
s3 = boto3.resource('s3')
objs = s3.Bucket(bucket).objects.filter(Prefix=self.key_map(source_prefix))
return [obj.key for obj in objs]
def put(self, data, obj):
body = self.generate_output_file(data)
dest_key = self.key_map(obj.key)
s3 = boto3.resource('s3')
s3.Object(obj.bucket_name, dest_key).upload_fileobj(io.BytesIO(body))
def generate_output_file(self, data):
df = pd.DataFrame(data)
tsv = df.to_csv(sep='\t', header=False, index=False)
return gzip.compress(tsv.encode())
| Use managed transfer for uploading | Use managed transfer for uploading
| Python | mit | uiureo/demae | ---
+++
@@ -2,6 +2,7 @@
import gzip
import boto3
import re
+import io
def default_key_map(key):
@@ -22,7 +23,7 @@
dest_key = self.key_map(obj.key)
s3 = boto3.resource('s3')
- s3.Object(obj.bucket_name, dest_key).put(Body=body)
+ s3.Object(obj.bucket_name, dest_key).upload_fileobj(io.BytesIO(body))
def generate_output_file(self, data):
df = pd.DataFrame(data) |
f6e18d142ac965221737205f65d66751ea02f168 | hack_plot/management/commands/parse_authlog.py | hack_plot/management/commands/parse_authlog.py | from django.core.management.base import BaseCommand, CommandError
from ...cron import parse_auth_log
class Command(BaseCommand):
def handle(self, *args, **options):
parse_auth_log()
| from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
try:
import simplejson as json
except ImportError as e:
import json
from rest_framework.renderers import JSONRenderer
from unipath import Path
from ...api.serializers import HackLocationSerializer
from ...cron import parse_auth_log
from ...models import SshHackLocation
class Command(BaseCommand):
def handle(self, *args, **options):
parse_auth_log()
# Write the attempt data to json for fast AJAX loading
serializer = HackLocationSerializer(SshHackLocation.objects.all(), many=True)
data = JSONRenderer().render(serializer.data, 'application/json', {})
json_output_file = Path(settings.STATIC_ROOT).child('hack_location.json')
with open(json_output_file, 'wb') as f:
f.write(data)
| Write hack locations to json after parsing log file | Write hack locations to json after parsing log file
| Python | mit | hellsgate1001/graphs,hellsgate1001/graphs,hellsgate1001/graphs | ---
+++
@@ -1,7 +1,24 @@
+from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
+try:
+ import simplejson as json
+except ImportError as e:
+ import json
+
+from rest_framework.renderers import JSONRenderer
+from unipath import Path
+
+from ...api.serializers import HackLocationSerializer
from ...cron import parse_auth_log
+from ...models import SshHackLocation
class Command(BaseCommand):
def handle(self, *args, **options):
parse_auth_log()
+ # Write the attempt data to json for fast AJAX loading
+ serializer = HackLocationSerializer(SshHackLocation.objects.all(), many=True)
+ data = JSONRenderer().render(serializer.data, 'application/json', {})
+ json_output_file = Path(settings.STATIC_ROOT).child('hack_location.json')
+ with open(json_output_file, 'wb') as f:
+ f.write(data) |
49069663a3fe3d44be9ab59e59a90d0dfcf49f0c | mayatools/qt.py | mayatools/qt.py |
try:
import sip
from uitools.qt import QtCore
import maya.OpenMayaUI as apiUI
# These modules will not exist while building the docs.
except ImportError:
import os
if os.environ.get('SPHINX') != 'True':
raise
def get_maya_window():
"""Get the main Maya window as a QtGui.QMainWindow."""
ptr = apiUI.MQtUtil.mainWindow()
if ptr is not None:
return sip.wrapinstance(long(ptr), QtCore.QObject)
def maya_to_qt(maya_object):
"""Convert a Maya UI path to a Qt object.
:param str maya_object: The path of the Maya UI object to convert.
:returns: QtCore.QObject or None
"""
ptr = (
apiUI.MQtUtil.findControl(maya_object) or
apiUI.MQtUtil.findLayout(maya_object) or
apiUI.MQtUtil.findMenuItem(maya_object)
)
if ptr is not None:
return sip.wrapinstance(long(ptr), QtCore.QObject) |
try:
from uitools.sip import wrapinstance
from uitools.qt import QtCore
import maya.OpenMayaUI as apiUI
# These modules will not exist while building the docs.
except ImportError:
import os
if os.environ.get('SPHINX') != 'True':
raise
def get_maya_window():
"""Get the main Maya window as a QtGui.QMainWindow."""
ptr = apiUI.MQtUtil.mainWindow()
if ptr is not None:
return wrapinstance(long(ptr), QtCore.QObject)
def maya_to_qt(maya_object):
"""Convert a Maya UI path to a Qt object.
:param str maya_object: The path of the Maya UI object to convert.
:returns: QtCore.QObject or None
"""
ptr = (
apiUI.MQtUtil.findControl(maya_object) or
apiUI.MQtUtil.findLayout(maya_object) or
apiUI.MQtUtil.findMenuItem(maya_object)
)
if ptr is not None:
return wrapinstance(long(ptr), QtCore.QObject) | Use uitools.sip instead of straight sip | Use uitools.sip instead of straight sip | Python | bsd-3-clause | westernx/mayatools,westernx/mayatools | ---
+++
@@ -1,6 +1,6 @@
try:
- import sip
+ from uitools.sip import wrapinstance
from uitools.qt import QtCore
import maya.OpenMayaUI as apiUI
@@ -15,7 +15,7 @@
"""Get the main Maya window as a QtGui.QMainWindow."""
ptr = apiUI.MQtUtil.mainWindow()
if ptr is not None:
- return sip.wrapinstance(long(ptr), QtCore.QObject)
+ return wrapinstance(long(ptr), QtCore.QObject)
def maya_to_qt(maya_object):
@@ -32,4 +32,4 @@
apiUI.MQtUtil.findMenuItem(maya_object)
)
if ptr is not None:
- return sip.wrapinstance(long(ptr), QtCore.QObject)
+ return wrapinstance(long(ptr), QtCore.QObject) |
3d4327f6d9d71c6b396b0655de81373210417aba | apps/i4p_base/urls.py | apps/i4p_base/urls.py | #-- encoding: utf-8 --
from django.conf.urls.defaults import patterns, url
from haystack.views import search_view_factory
import views
import ajax
urlpatterns = patterns('',
url(r'^$', views.homepage, name='i4p-index'),
url(r'^homepage/ajax/slider/bestof/$', ajax.slider_bestof, name='i4p-homepage-ajax-slider-bestof'),
url(r'^homepage/ajax/slider/latest/$', ajax.slider_latest, name='i4p-homepage-ajax-slider-latest'),
url(r'^homepage/ajax/slider/commented/$', ajax.slider_most_commented, name='i4p-homepage-ajax-slider-commented'),
url(r'^history/check_version/(?P<pk>[\d]+)$', views.VersionActivityCheckView.as_view(), name='history-check-version'),
url(r'^search/', search_view_factory(view_class=views.SearchView), name='i4p-search'),
url(r'^location/(?P<location_id>\d+)', views.LocationEditView.as_view(), name='i4p-location-edit'),
url(r'^locations/$', views.LocationListView.as_view(), name='i4p-location-list'),
url(r'^locations/missing/(?P<missing_field_name>\w+)$', views.LocationListView.as_view(), name='i4p-location-missing-list'),
)
| #-- encoding: utf-8 --
from django.conf.urls.defaults import patterns, url
from haystack.views import search_view_factory
import views
import ajax
urlpatterns = patterns('',
#url(r'^$', views.homepage, name='i4p-index'),
url(r'^homepage/ajax/slider/bestof/$', ajax.slider_bestof, name='i4p-homepage-ajax-slider-bestof'),
url(r'^homepage/ajax/slider/latest/$', ajax.slider_latest, name='i4p-homepage-ajax-slider-latest'),
url(r'^homepage/ajax/slider/commented/$', ajax.slider_most_commented, name='i4p-homepage-ajax-slider-commented'),
url(r'^history/check_version/(?P<pk>[\d]+)$', views.VersionActivityCheckView.as_view(), name='history-check-version'),
url(r'^search/', search_view_factory(view_class=views.SearchView), name='i4p-search'),
url(r'^location/(?P<location_id>\d+)', views.LocationEditView.as_view(), name='i4p-location-edit'),
url(r'^locations/$', views.LocationListView.as_view(), name='i4p-location-list'),
url(r'^locations/missing/(?P<missing_field_name>\w+)$', views.LocationListView.as_view(), name='i4p-location-missing-list'),
)
| Remove explicit link to homepage view in i4p_base | Remove explicit link to homepage view in i4p_base
| Python | agpl-3.0 | ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople | ---
+++
@@ -7,7 +7,7 @@
import ajax
urlpatterns = patterns('',
- url(r'^$', views.homepage, name='i4p-index'),
+ #url(r'^$', views.homepage, name='i4p-index'),
url(r'^homepage/ajax/slider/bestof/$', ajax.slider_bestof, name='i4p-homepage-ajax-slider-bestof'),
url(r'^homepage/ajax/slider/latest/$', ajax.slider_latest, name='i4p-homepage-ajax-slider-latest'),
url(r'^homepage/ajax/slider/commented/$', ajax.slider_most_commented, name='i4p-homepage-ajax-slider-commented'), |
5446b0cc9335a3fe6c88158c1b864cdc1b0988d5 | onestop/stopbins.py | onestop/stopbins.py | """Stop Bins."""
import util
import errors
import registry
import entities
class StopBin(object):
def __init__(self, prefix):
self.prefix = prefix
self._stops = {}
def stops(self):
return self._stops.values()
def add_stop(self, stop):
key = stop.onestop()
# New stop
if key not in self._stops:
self._stops[key] = stop
else:
self._stops[key].merge(stop)
@classmethod
def from_json(cls, data):
stopbin = cls(prefix=data['prefix'])
for feature in data['features']:
stop = entities.OnestopStop.from_json(feature)
stopbin.add_stop(stop)
return stopbin
def json(self):
return {
'type': 'FeatureCollection',
'properties': {},
'prefix': self.prefix,
'features': [
i.json()
for i in
sorted(self.stops(), key=lambda x:x.onestop())
]
}
| """Stop Bins."""
import util
import errors
import registry
import entities
class StopBin(object):
def __init__(self, prefix):
self.prefix = prefix
self._stops = {}
def stops(self):
return self._stops.values()
def add_stop(self, stop):
key = stop.onestop()
# New stop
if key not in self._stops:
self._stops[key] = stop
else:
self._stops[key].merge(stop)
return self._stops[key]
@classmethod
def from_json(cls, data):
stopbin = cls(prefix=data['prefix'])
for feature in data['features']:
stop = entities.OnestopStop.from_json(feature)
stopbin.add_stop(stop)
return stopbin
def json(self):
return {
'type': 'FeatureCollection',
'properties': {},
'prefix': self.prefix,
'features': [
i.json()
for i in
sorted(self.stops(), key=lambda x:x.onestop())
]
}
| Return added stop in StopBin.add_stop() | Return added stop in StopBin.add_stop()
| Python | mit | transitland/transitland-python-client,srthurman/transitland-python-client | ---
+++
@@ -19,6 +19,7 @@
self._stops[key] = stop
else:
self._stops[key].merge(stop)
+ return self._stops[key]
@classmethod
def from_json(cls, data): |
73877a82bf9b690827102d1a932a31af94ab78e9 | partner_event/models/res_partner.py | partner_event/models/res_partner.py | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
| # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_attended_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
@api.one
@api.depends('registrations.state')
def _count_attended_registration(self):
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
| Revert last commit and tiggers _count_attended_registration method when one registrations.state changes | Revert last commit and tiggers _count_attended_registration method when one registrations.state changes
| Python | agpl-3.0 | open-synergy/event,open-synergy/event,Endika/event,Antiun/event | ---
+++
@@ -17,11 +17,15 @@
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
- compute='_count_registration', store=True)
+ compute='_count_attended_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
+
+ @api.one
+ @api.depends('registrations.state')
+ def _count_attended_registration(self):
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done')) |
e5d3f0f0295cb5943f7926e49da42565a7905c85 | dummy_celery_worker.py | dummy_celery_worker.py | import os
from celery import Celery
app = Celery('dummy_tasks', broker=os.environ['TEST_HADES_BROKER_URI'],
backend=os.environ['TEST_HADES_RESULT_BACKEND_URI'])
@app.task
def get_port_auth_attempts(nasipaddress, nasportid):
return ["Success!", "No success! :-(",
"Gotten: {}/{}".format(nasipaddress, nasportid)]
| import os
from datetime import datetime
from time import sleep
from celery import Celery
app = Celery('dummy_tasks', broker=os.environ['TEST_HADES_BROKER_URI'],
backend=os.environ['TEST_HADES_RESULT_BACKEND_URI'])
@app.task
def get_port_auth_attempts(nasipaddress, nasportid, limit=100):
if nasportid == 'magic_sleep':
# sleep for 10 seconds, which is longer than the default
sleep(10)
return []
if nasipaddress != '141.30.223.206' or nasportid != 'C6':
return []
return [
# (packettype, replymessage, username, auth_date, egress_vlan)
# TODO: What VLAN should there be on Auth-Reject? In any
# case, it will be unused.
("Auth-Reject", "", "00:de:ad:be:ef:00", datetime(2017, 4, 20, 18, 25), None),
("Auth-Access", "Wu5_untagged", "00:de:ad:be:ef:00", datetime(2017, 4, 20, 18, 20), 15),
("Auth-Access", "unknown", "00:de:ad:be:ef:01", datetime(2017, 4, 20, 18, 5), 1001),
("Auth-Access", "traffic", "00:de:ad:be:ef:00", datetime(2017, 4, 20, 18, 0), 1001),
][:limit]
| Implement correct function signature in dummy API and sleep trigger | Implement correct function signature in dummy API and sleep trigger
| Python | apache-2.0 | lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft | ---
+++
@@ -1,11 +1,28 @@
import os
+from datetime import datetime
+from time import sleep
from celery import Celery
app = Celery('dummy_tasks', broker=os.environ['TEST_HADES_BROKER_URI'],
backend=os.environ['TEST_HADES_RESULT_BACKEND_URI'])
+
@app.task
-def get_port_auth_attempts(nasipaddress, nasportid):
- return ["Success!", "No success! :-(",
- "Gotten: {}/{}".format(nasipaddress, nasportid)]
+def get_port_auth_attempts(nasipaddress, nasportid, limit=100):
+ if nasportid == 'magic_sleep':
+ # sleep for 10 seconds, which is longer than the default
+ sleep(10)
+ return []
+
+ if nasipaddress != '141.30.223.206' or nasportid != 'C6':
+ return []
+ return [
+ # (packettype, replymessage, username, auth_date, egress_vlan)
+ # TODO: What VLAN should there be on Auth-Reject? In any
+ # case, it will be unused.
+ ("Auth-Reject", "", "00:de:ad:be:ef:00", datetime(2017, 4, 20, 18, 25), None),
+ ("Auth-Access", "Wu5_untagged", "00:de:ad:be:ef:00", datetime(2017, 4, 20, 18, 20), 15),
+ ("Auth-Access", "unknown", "00:de:ad:be:ef:01", datetime(2017, 4, 20, 18, 5), 1001),
+ ("Auth-Access", "traffic", "00:de:ad:be:ef:00", datetime(2017, 4, 20, 18, 0), 1001),
+ ][:limit] |
ca5851c681452e20a07434b74481860722077bb0 | server/setup.py | server/setup.py | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
version = '0.1'
requires = ['pyramid', 'pyramid_debugtoolbar']
if __name__ == '__main__':
setup(name='pings',
version=version,
description='pings',
long_description=README,
classifiers=[
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Christian Hudon',
author_email='chrish@pianocktail.org',
url='https://github.com/lisa-lab/pings',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="pings",
scripts = ['leaderboards_server', 'storage_server'],
entry_points = """\
[paste.app_factory]
main = pings.web_server:main
""",
paster_plugins=['pyramid'],
)
| import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
version = '0.1'
# Use requirements.txt for all requirements, at least for now.
requires = []
if __name__ == '__main__':
setup(name='pings',
version=version,
description='pings',
long_description=README,
classifiers=[
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Christian Hudon',
author_email='chrish@pianocktail.org',
url='https://github.com/lisa-lab/pings',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="pings",
scripts = ['leaderboards_server', 'storage_server'],
entry_points = """\
[paste.app_factory]
main = pings.web_server:main
""",
paster_plugins=['pyramid'],
)
| Use requirements.txt for all requirements, at least for now. | Use requirements.txt for all requirements, at least for now.
| Python | bsd-3-clause | lisa-lab/pings,lisa-lab/pings,lisa-lab/pings,lisa-lab/pings | ---
+++
@@ -6,7 +6,8 @@
README = open(os.path.join(here, 'README.txt')).read()
version = '0.1'
-requires = ['pyramid', 'pyramid_debugtoolbar']
+# Use requirements.txt for all requirements, at least for now.
+requires = []
if __name__ == '__main__':
@@ -37,4 +38,3 @@
""",
paster_plugins=['pyramid'],
)
- |
c6e130682712e8534e773036ba3d87c09b91ff1c | knowledge_repo/postprocessors/format_checks.py | knowledge_repo/postprocessors/format_checks.py | from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
| from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
assert isinstance(headers[field], typ), \
f"Value for field `{field}` is of type " + \
f"{type(headers[field])}, and needs to be of type {typ}."
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), \
f"Value for field `{field}` is of type " + \
f"{type(headers[field])}, and needs to be of type {typ}."
| Fix lint issues related to long lines | Fix lint issues related to long lines
| Python | apache-2.0 | airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo | ---
+++
@@ -9,11 +9,13 @@
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
- assert field in headers, "Required field `{}` missing from headers.".format(
- field)
- assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
- field, type(headers[field]), typ)
+ assert field in headers, \
+ "Required field `{field}` missing from headers."
+ assert isinstance(headers[field], typ), \
+ f"Value for field `{field}` is of type " + \
+ f"{type(headers[field])}, and needs to be of type {typ}."
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
- assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
- field, type(headers[field]), typ)
+ assert isinstance(headers[field], typ), \
+ f"Value for field `{field}` is of type " + \
+ f"{type(headers[field])}, and needs to be of type {typ}." |
bf264d5683c7fcab69e117f235fbe16298ac90b8 | wal_e/worker/wabs/wabs_deleter.py | wal_e/worker/wabs/wabs_deleter.py | from wal_e import retries
from wal_e.worker.base import _Deleter
class Deleter(_Deleter):
def __init__(self, wabs_conn, container):
super(Deleter, self).__init__()
self.wabs_conn = wabs_conn
self.container = container
@retries.retry()
def _delete_batch(self, page):
# Azure Blob Service has no concept of mass-delete, so we must nuke
# each blob one-by-one...
for blob in page:
self.wabs_conn.delete_blob(self.container, blob.name)
| from wal_e import retries
from wal_e import log_help
from wal_e.worker.base import _Deleter
try:
# New class name in the Azure SDK sometime after v1.0.
#
# See
# https://github.com/Azure/azure-sdk-for-python/blob/master/ChangeLog.txt
from azure.common import AzureMissingResourceHttpError
except ImportError:
# Backwards compatbility for older Azure drivers.
from azure import WindowsAzureMissingResourceError \
as AzureMissingResourceHttpError
logger = log_help.WalELogger(__name__)
class Deleter(_Deleter):
def __init__(self, wabs_conn, container):
super(Deleter, self).__init__()
self.wabs_conn = wabs_conn
self.container = container
@retries.retry()
def _delete_batch(self, page):
# Azure Blob Service has no concept of mass-delete, so we must nuke
# each blob one-by-one...
for blob in page:
try:
self.wabs_conn.delete_blob(self.container, blob.name)
except AzureMissingResourceHttpError:
logger.warning(
msg='failed while deleting resource',
detail='Blob {0} does not exist in container {1}.'.format(
blob.name, self.container))
| Fix infinite retry while deleting missing resource in WABS | Fix infinite retry while deleting missing resource in WABS
| Python | bsd-3-clause | wal-e/wal-e | ---
+++
@@ -1,5 +1,20 @@
from wal_e import retries
+from wal_e import log_help
from wal_e.worker.base import _Deleter
+
+try:
+ # New class name in the Azure SDK sometime after v1.0.
+ #
+ # See
+ # https://github.com/Azure/azure-sdk-for-python/blob/master/ChangeLog.txt
+ from azure.common import AzureMissingResourceHttpError
+except ImportError:
+ # Backwards compatbility for older Azure drivers.
+ from azure import WindowsAzureMissingResourceError \
+ as AzureMissingResourceHttpError
+
+
+logger = log_help.WalELogger(__name__)
class Deleter(_Deleter):
@@ -14,4 +29,10 @@
# Azure Blob Service has no concept of mass-delete, so we must nuke
# each blob one-by-one...
for blob in page:
- self.wabs_conn.delete_blob(self.container, blob.name)
+ try:
+ self.wabs_conn.delete_blob(self.container, blob.name)
+ except AzureMissingResourceHttpError:
+ logger.warning(
+ msg='failed while deleting resource',
+ detail='Blob {0} does not exist in container {1}.'.format(
+ blob.name, self.container)) |
62845279b46d6f4394e05e666fe459a427bdd358 | enthought/qt/QtCore.py | enthought/qt/QtCore.py | import os
qt_api = os.environ.get('QT_API', 'pyqt')
if qt_api == 'pyqt':
from PyQt4.QtCore import *
from PyQt4.QtCore import pyqtSignal as Signal
from PyQt4.Qt import QCoreApplication
from PyQt4.Qt import Qt
else:
from PySide.QtCore import *
| import os
qt_api = os.environ.get('QT_API', 'pyqt')
if qt_api == 'pyqt':
from PyQt4.QtCore import *
from PyQt4.QtCore import pyqtSignal as Signal
from PyQt4.Qt import QCoreApplication
from PyQt4.Qt import Qt
# Emulate PySide version metadata.
__version__ = QT_VERSION_STR
__version_info__ = tuple(map(int, QT_VERSION_STR.split('.')))
else:
from PySide.QtCore import *
| Add PySide-style version metadata when PyQt4 is present. | Add PySide-style version metadata when PyQt4 is present.
| Python | bsd-3-clause | burnpanck/traits,burnpanck/traits | ---
+++
@@ -9,5 +9,9 @@
from PyQt4.Qt import QCoreApplication
from PyQt4.Qt import Qt
+ # Emulate PySide version metadata.
+ __version__ = QT_VERSION_STR
+ __version_info__ = tuple(map(int, QT_VERSION_STR.split('.')))
+
else:
from PySide.QtCore import * |
cc87cf3967e14274b7819f5424b80bd7e491f0ce | alg_kruskal_minimum_spanning_tree.py | alg_kruskal_minimum_spanning_tree.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def kruskal():
"""Kruskal's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): TBD.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def kruskal():
"""Kruskal's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): TBD.
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Kruskal\'s minimum spanning tree:')
pass
if __name__ == '__main__':
main()
| Add weighted graph in main() | Add weighted graph in main()
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | ---
+++
@@ -11,7 +11,18 @@
def main():
- pass
+ w_graph_d = {
+ 'a': {'b': 1, 'd': 4, 'e': 3},
+ 'b': {'a': 1, 'd': 4, 'e': 2},
+ 'c': {'e': 4, 'f': 5},
+ 'd': {'a': 4, 'b': 4, 'e': 4},
+ 'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
+ 'f': {'c': 5, 'e': 7}
+ }
+ print('w_graph_d:\n{}'.format(w_graph_d))
+
+ print('Kruskal\'s minimum spanning tree:')
+ pass
if __name__ == '__main__': |
4b5f8e14db9cd157d1b3b616726b1c9fb1b3c9b5 | demos/py_simple/rotate90.py | demos/py_simple/rotate90.py | #!/usr/bin/env python
import sys
import gfxprim.core as core
import gfxprim.loaders as loaders
import gfxprim.filters as filters
def main():
if len(sys.argv) != 3:
print("USAGE: %s imput_image output_image" % sys.argv[0]);
sys.exit(1)
# Turns on debug messages
core.SetDebugLevel(10);
# Load Image
src = loaders.LoadImage(sys.argv[1], None)
# Rotate by 90 degrees
res = filters.FilterRotate90Alloc(src, None)
# Save Image
res.Save(sys.argv[2])
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import sys
import gfxprim.core as core
import gfxprim.loaders as loaders
import gfxprim.filters as filters
def main():
if len(sys.argv) != 3:
print("USAGE: %s imput_image output_image" % sys.argv[0]);
sys.exit(1)
# Turns on debug messages
core.SetDebugLevel(10);
# Load Image
src = loaders.LoadImage(sys.argv[1], None)
# Rotate by 90 degrees
res = filters.FilterRotate90_Alloc(src, None)
# Save Image
res.Save(sys.argv[2])
if __name__ == '__main__':
main()
| Fix python example after the API update. | py_simple: Fix python example after the API update.
| Python | lgpl-2.1 | gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim | ---
+++
@@ -16,7 +16,7 @@
# Load Image
src = loaders.LoadImage(sys.argv[1], None)
# Rotate by 90 degrees
- res = filters.FilterRotate90Alloc(src, None)
+ res = filters.FilterRotate90_Alloc(src, None)
# Save Image
res.Save(sys.argv[2])
|
283d7299c732b80d504e971424b18996719fdf80 | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.6'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7.dev0'
| Update dsub version to 0.3.7.dev0 | Update dsub version to 0.3.7.dev0
PiperOrigin-RevId: 281987296
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | ---
+++
@@ -26,4 +26,4 @@
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
-DSUB_VERSION = '0.3.6'
+DSUB_VERSION = '0.3.7.dev0' |
a5ae06630ef96d1093e4498e0e5437c0a7e65bfa | parse.py | parse.py | from PIL import Image
import sys
import pyocr
import pyocr.builders
tools = pyocr.get_available_tools()
if len(tools) == 0:
print("Error: No OCR tool found")
sys.exit(1)
# should be 'Tesseract (sh)'
tool = tools[0]
orig_image = Image.open('test.png')
# crop to only the section with the number of problems solved
cropped_image = orig_image.crop((47, 40, 97, 60))
# double the size of the image so the OCR has more to go on
resized_image = cropped_image.resize((100, 40), Image.ANTIALIAS)
digits = tool.image_to_string(
resized_image,
builder=pyocr.tesseract.DigitBuilder()
)
print(digits)
| from PIL import Image
import sys
import pyocr
import pyocr.builders
image_loc = ' '.join(sys.argv[1:])
tools = pyocr.get_available_tools()
if len(tools) == 0:
print("Error: No OCR tool found")
sys.exit(1)
# should be 'Tesseract (sh)'
tool = tools[0]
orig_image = Image.open(image_loc)
# crop to only the section with the number of problems solved
cropped_image = orig_image.crop((47, 40, 97, 60))
# double the size of the image so the OCR has more to go on
resized_image = cropped_image.resize((100, 40), Image.ANTIALIAS)
digits = tool.image_to_string(
resized_image,
builder=pyocr.tesseract.DigitBuilder()
)
print(digits)
| Allow user to specify image file w argv | Allow user to specify image file w argv
| Python | bsd-2-clause | iandioch/euler-foiler | ---
+++
@@ -4,6 +4,7 @@
import pyocr
import pyocr.builders
+image_loc = ' '.join(sys.argv[1:])
tools = pyocr.get_available_tools()
if len(tools) == 0:
@@ -13,7 +14,7 @@
# should be 'Tesseract (sh)'
tool = tools[0]
-orig_image = Image.open('test.png')
+orig_image = Image.open(image_loc)
# crop to only the section with the number of problems solved
cropped_image = orig_image.crop((47, 40, 97, 60)) |
b67a5daaa7efc946aebcdfdabbe201057af4aef5 | globus_sdk/version.py | globus_sdk/version.py | # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "0.6.0"
| # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "0.7.0"
| Update to v0.7.0 for release | Update to v0.7.0 for release
| Python | apache-2.0 | aaschaer/globus-sdk-python,sirosen/globus-sdk-python,globus/globus-sdk-python,globus/globus-sdk-python,globusonline/globus-sdk-python | ---
+++
@@ -1,3 +1,3 @@
# single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
-__version__ = "0.6.0"
+__version__ = "0.7.0" |
a16889f353873e3d08a24440b9aa83177ffd001f | engine.py | engine.py | #!/usr/bin/env python
import json
import sys
import os # For os.path and the like
class DictWrapper(object):
def __init__(self, d):
self.__dict__ = d
def eval_script(self):
return eval(self.script) # With self as context
d = json.load(sys.stdin)
dw = DictWrapper(d)
json.dump(dw.eval_script(), sys.stdout)
| #!/usr/bin/env python
import json
import sys
import os # For os.path and the like
class DictWrapper(object):
def __init__(self, d):
self.__dict__ = d
def eval_script(self):
return eval(self.script) # With self as context
def __getattr__(self, attr):
return None
if __name__ == '__main__':
input_dict = json.load(sys.stdin)
dw = DictWrapper(input_dict)
json.dump(dw.eval_script(), sys.stdout)
| Implement __getattr__ to handle KeyErrors | Implement __getattr__ to handle KeyErrors
| Python | mit | dleehr/py-expr-engine | ---
+++
@@ -2,15 +2,21 @@
import json
import sys
-import os # For os.path and the like
+import os # For os.path and the like
+
class DictWrapper(object):
def __init__(self, d):
self.__dict__ = d
+
def eval_script(self):
- return eval(self.script) # With self as context
+ return eval(self.script) # With self as context
+
+ def __getattr__(self, attr):
+ return None
-d = json.load(sys.stdin)
-dw = DictWrapper(d)
-json.dump(dw.eval_script(), sys.stdout)
+if __name__ == '__main__':
+ input_dict = json.load(sys.stdin)
+ dw = DictWrapper(input_dict)
+ json.dump(dw.eval_script(), sys.stdout) |
fc7beded3d286d831df29b8b32614b2eb56ef206 | enasearch/__main__.py | enasearch/__main__.py | #!/usr/bin/env python
import click
import ebisearch
from pprint import pprint
@click.group()
def main():
pass
@click.command('get_results', short_help='Get list of results')
def get_results():
"""Return the list of domains in EBI"""
ebisearch.get_results(verbose=True)
@click.command('get_filter_fields', short_help='Get filter fields')
@click.option(
'--result',
help='Id of a result (accessible with get_results)')
def get_filter_fields(result):
"""Get the filter fields of a result to build a query"""
ebisearch.get_filter_fields(verbose=True)
main.add_command(get_results)
main.add_command(get_filter_fields)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import click
import ebisearch
from pprint import pprint
@click.group()
def main():
pass
@click.command('get_results', short_help='Get list of results')
def get_results():
"""Return the list of domains in EBI"""
ebisearch.get_results(verbose=True)
@click.command('get_filter_fields', short_help='Get filter fields')
@click.option(
'--result',
help='Id of a result (accessible with get_results)')
def get_filter_fields(result):
"""Get the filter fields of a result to build a query"""
ebisearch.get_filter_fields(verbose=True)
@click.command('get_filter_types', short_help='Get filter types')
def get_filter_types():
"""Get the types of filters usable to build a query"""
ebisearch.get_filter_types(verbose=True)
main.add_command(get_results)
main.add_command(get_filter_fields)
main.add_command(get_filter_types)
if __name__ == "__main__":
main()
| Add function for get filter types | Add function for get filter types
| Python | mit | bebatut/enasearch | ---
+++
@@ -25,8 +25,15 @@
ebisearch.get_filter_fields(verbose=True)
+@click.command('get_filter_types', short_help='Get filter types')
+def get_filter_types():
+ """Get the types of filters usable to build a query"""
+ ebisearch.get_filter_types(verbose=True)
+
+
main.add_command(get_results)
main.add_command(get_filter_fields)
+main.add_command(get_filter_types)
if __name__ == "__main__": |
9fcfd8e13b5c4684a1cb3890427662ded2d28c24 | examples/get_dataset.py | examples/get_dataset.py | #!/usr/bin/env python3
#
# This script is used for downloading the dataset used by the examples.
# Dataset used: UCI / Pima Indians Diabetes (in libsvm format)
import os
import urllib.request
DATASET_URL = 'http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/diabetes'
TARGET_PATH = os.path.dirname(os.path.realpath(__file__)) + '/dataset.txt'
def main():
urllib.request.urlretrieve(DATASET_URL, TARGET_PATH)
if __name__ == '__main__': main()
| #!/usr/bin/env python3
#
# This script is used for downloading the dataset used by the examples.
# Dataset used: Statlog / Letter (in libsvm format)
import os
import urllib.request
import random
DATASET_URL = 'http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multiclass/letter.scale'
DATASET_SIZE = 1000
TARGET_PATH = os.path.dirname(os.path.realpath(__file__)) + '/dataset.txt'
def main():
rows = list(urllib.request.urlopen(DATASET_URL))
selected = random.sample(rows, DATASET_SIZE)
with open(TARGET_PATH, 'wb') as f:
for row in selected:
f.write(row)
if __name__ == '__main__': main()
| Change dataset used in example (letter) | Change dataset used in example (letter)
XXX: UncertaintySampling(le) weird?
| Python | bsd-2-clause | ntucllab/libact,ntucllab/libact,ntucllab/libact | ---
+++
@@ -1,18 +1,24 @@
#!/usr/bin/env python3
#
# This script is used for downloading the dataset used by the examples.
-# Dataset used: UCI / Pima Indians Diabetes (in libsvm format)
+# Dataset used: Statlog / Letter (in libsvm format)
import os
import urllib.request
+import random
-DATASET_URL = 'http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/diabetes'
+DATASET_URL = 'http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multiclass/letter.scale'
+DATASET_SIZE = 1000
TARGET_PATH = os.path.dirname(os.path.realpath(__file__)) + '/dataset.txt'
def main():
- urllib.request.urlretrieve(DATASET_URL, TARGET_PATH)
+ rows = list(urllib.request.urlopen(DATASET_URL))
+ selected = random.sample(rows, DATASET_SIZE)
+ with open(TARGET_PATH, 'wb') as f:
+ for row in selected:
+ f.write(row)
if __name__ == '__main__': main() |
32a44354c0a5421c2b8a8ab9d63a26e36ddd6158 | sponsorship_switzerland/migrations/12.0.1.0.2/pre-migration.py | sponsorship_switzerland/migrations/12.0.1.0.2/pre-migration.py | from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
# Associate already created toilets fund to new xml record
covid_fund = env["product.template"].search(
[("default_code", "=", "toilet")]
)
if covid_fund:
openupgrade.add_xmlid(
env.cr,
"sponsorship_switzerland",
"product_template_covid",
"product.template",
covid_fund.id,
)
| from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
# Associate already created toilets fund to new xml record
covid_fund = env["product.template"].search(
[("default_code", "=", "coronavirus")]
)
if covid_fund:
openupgrade.add_xmlid(
env.cr,
"sponsorship_switzerland",
"product_template_covid",
"product.template",
covid_fund.id,
)
| Fix migration of covid product | Fix migration of covid product
| Python | agpl-3.0 | eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland | ---
+++
@@ -8,7 +8,7 @@
# Associate already created toilets fund to new xml record
covid_fund = env["product.template"].search(
- [("default_code", "=", "toilet")]
+ [("default_code", "=", "coronavirus")]
)
if covid_fund: |
8923d10fc831afe7ade5dad4e14167f3525396b6 | scripts/nipy_4dto3D.py | scripts/nipy_4dto3D.py | #!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import sys
import nipy.io.imageformats as nii
if __name__ == '__main__':
try:
fname = sys.argv[1]
except IndexError:
raise OSError('Expecting 4d image filename')
img = nii.load(fname)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(fname)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
| #!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import nipy.externals.argparse as argparse
import nipy.io.imageformats as nii
def main():
# create the parser
parser = argparse.ArgumentParser()
# add the arguments
parser.add_argument('filename', type=str,
help='4D image filename')
# parse the command line
args = parser.parse_args()
img = nii.load(args.filename)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(args.filename)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
if __name__ == '__main__':
main()
| Use argparse for 4D to 3D | Use argparse for 4D to 3D | Python | bsd-3-clause | yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD | ---
+++
@@ -3,22 +3,30 @@
analyze, MINC, at the moment, as nifti 3D files '''
import os
-import sys
+import nipy.externals.argparse as argparse
import nipy.io.imageformats as nii
-if __name__ == '__main__':
- try:
- fname = sys.argv[1]
- except IndexError:
- raise OSError('Expecting 4d image filename')
- img = nii.load(fname)
+def main():
+ # create the parser
+ parser = argparse.ArgumentParser()
+ # add the arguments
+ parser.add_argument('filename', type=str,
+ help='4D image filename')
+ # parse the command line
+ args = parser.parse_args()
+ img = nii.load(args.filename)
imgs = nii.four_to_three(img)
- froot, ext = os.path.splitext(fname)
+ froot, ext = os.path.splitext(args.filename)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
+
+
+if __name__ == '__main__':
+ main()
+
|
ffff9d10862391289e4fba8ac120983ac6368200 | setup.py | setup.py | from setuptools import setup
setup(
name='cmsplugin-biography',
version='0.0.1',
packages=['cmsplugin_biography', 'cmsplugin_biography.migrations', ],
install_requires=[
'django-cms',
'djangocms-text-ckeditor==1.0.9',
'easy-thumbnails==1.2',
],
author='Kevin Richardson',
author_email='kevin@magically.us',
description='A Django CMS plugin that manages and displays biographical information',
long_description=open('README.rst').read(),
license='MIT',
url='http://github.com/kfr2/cmsplugin-biography',
include_package_data=True
)
| from setuptools import setup
setup(
name='cmsplugin-biography',
version='0.0.1',
packages=['cmsplugin_biography', 'cmsplugin_biography.migrations', ],
install_requires=[
'django-cms',
'djangocms-text-ckeditor==1.0.9',
'easy-thumbnails==1.2',
],
author='Kevin Richardson',
author_email='kevin@magically.us',
description='A Django CMS plugin that manages and displays biographical information',
long_description=open('README.rst').read(),
license='MIT',
url='http://github.com/kfr2/cmsplugin-biography',
include_package_data=True,
zip_safe=False
)
| Mark package as not zip_safe | Mark package as not zip_safe
This package needs access to its templates to function. Thus, the
zip_safe flag has been set to False to tell setuptools to not
install the package's egg as a zip file.
See http://pythonhosted.org/distribute/setuptools.html#setting-the-zip-safe-flag
for further information.
| Python | mit | kfr2/cmsplugin-biography | ---
+++
@@ -16,5 +16,6 @@
long_description=open('README.rst').read(),
license='MIT',
url='http://github.com/kfr2/cmsplugin-biography',
- include_package_data=True
+ include_package_data=True,
+ zip_safe=False
) |
427d3625f26b4a7f3533162e949ed941fa3fe89e | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='panoptescli',
version='1.1-pre',
url='https://github.com/zooniverse/panoptes-cli',
author='Adam McMaster',
author_email='adam@zooniverse.org',
description=(
'A command-line client for Panoptes, the API behind the Zooniverse'
),
packages=find_packages(),
include_package_data=True,
install_requires=[
'Click>=6.7,<6.8',
'PyYAML>=3.12,<5.2',
'panoptes-client>=1.0,<2.0',
],
entry_points='''
[console_scripts]
panoptes=panoptes_cli.scripts.panoptes:cli
''',
)
| from setuptools import setup, find_packages
setup(
name='panoptescli',
version='1.1-pre',
url='https://github.com/zooniverse/panoptes-cli',
author='Adam McMaster',
author_email='adam@zooniverse.org',
description=(
'A command-line client for Panoptes, the API behind the Zooniverse'
),
packages=find_packages(),
include_package_data=True,
install_requires=[
'Click>=6.7,<7.1',
'PyYAML>=3.12,<5.2',
'panoptes-client>=1.0,<2.0',
],
entry_points='''
[console_scripts]
panoptes=panoptes_cli.scripts.panoptes:cli
''',
)
| Update click requirement from <6.8,>=6.7 to >=6.7,<7.1 | Update click requirement from <6.8,>=6.7 to >=6.7,<7.1
Updates the requirements on [click](https://github.com/pallets/click) to permit the latest version.
- [Release notes](https://github.com/pallets/click/releases)
- [Changelog](https://github.com/pallets/click/blob/master/docs/changelog.rst)
- [Commits](https://github.com/pallets/click/commits/7.0)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> | Python | apache-2.0 | zooniverse/panoptes-cli | ---
+++
@@ -12,7 +12,7 @@
packages=find_packages(),
include_package_data=True,
install_requires=[
- 'Click>=6.7,<6.8',
+ 'Click>=6.7,<7.1',
'PyYAML>=3.12,<5.2',
'panoptes-client>=1.0,<2.0',
], |
ebb3ea0d72835c4acdc38ba241cf8fd4f828c5cd | setup.py | setup.py | from distutils.core import setup, Extension
import sys
ext_modules = [
Extension('classified._platform',
['src/classified._platform.c'],
extra_compile_args=[
'-DPLATFORM_%s' % (sys.platform.upper()),
'-Wunused',
]
)
]
setup(
name = 'classified',
version = '0.0.2',
author = 'Wijnand Modderman',
author_email = 'maze@pyth0n.org',
description = 'Classified data scanner',
license = 'MIT',
keywords = 'classified sensitive pan pci',
packages = [
'classified',
'classified.probe',
],
data_files = [
('/etc/classified', 'etc/classified.conf.sample'),
],
scripts = ['bin/classified'],
ext_modules = ext_modules,
)
| from distutils.core import setup, Extension
import sys
ext_modules = [
Extension('classified._platform',
['src/classified._platform.c'],
extra_compile_args=[
'-DPLATFORM_%s' % (sys.platform.upper()),
'-Wunused',
]
)
]
setup(
name = 'classified',
version = '0.0.2',
author = 'Wijnand Modderman',
author_email = 'maze@pyth0n.org',
description = 'Classified data scanner',
license = 'MIT',
keywords = 'classified sensitive pan pci',
packages = [
'classified',
'classified.probe',
'classified.probe.pan',
'classified.probe.password',
'classified.probe.ssl',
],
data_files = [
('/etc/classified', 'etc/classified.conf.sample'),
],
scripts = ['bin/classified'],
ext_modules = ext_modules,
)
| Move probes to their own directory | Move probes to their own directory
| Python | mit | tehmaze/classified,tehmaze/classified,tehmaze/classified | ---
+++
@@ -24,6 +24,9 @@
packages = [
'classified',
'classified.probe',
+ 'classified.probe.pan',
+ 'classified.probe.password',
+ 'classified.probe.ssl',
],
data_files = [
('/etc/classified', 'etc/classified.conf.sample'), |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.